lucenenet-commits mailing list archives

From synhers...@apache.org
Subject [03/50] [abbrv] lucenenet git commit: Cleaned up Suggest test code
Date Sun, 02 Oct 2016 14:35:21 GMT
Cleaned up Suggest test code


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/d897e714
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/d897e714
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/d897e714

Branch: refs/heads/master
Commit: d897e71462af2c96d846e6194926bc76c503ab5f
Parents: 1153dc1
Author: Shad Storhaug <shad@shadstorhaug.com>
Authored: Thu Sep 15 00:41:34 2016 +0700
Committer: Shad Storhaug <shad@shadstorhaug.com>
Committed: Sun Oct 2 17:43:58 2016 +0700

----------------------------------------------------------------------
 .../Spell/TestSpellChecker.cs                   |   2 +-
 .../Analyzing/AnalyzingInfixSuggesterTest.cs    |  82 +-----
 .../Suggest/Analyzing/AnalyzingSuggesterTest.cs | 275 +------------------
 .../Suggest/Analyzing/FuzzySuggesterTest.cs     | 170 +-----------
 .../Suggest/Analyzing/TestFreeTextSuggester.cs  | 102 -------
 .../Suggest/DocumentDictionaryTest.cs           |   7 -
 .../DocumentValueSourceDictionaryTest.cs        |   5 -
 .../Suggest/FileDictionaryTest.cs               |  20 +-
 .../Suggest/Fst/FSTCompletionTest.cs            |   2 -
 .../Suggest/Fst/WFSTCompletionTest.cs           |  25 +-
 .../Suggest/LookupBenchmarkTest.cs              |  44 +--
 .../Suggest/PersistenceTest.cs                  |   2 -
 12 files changed, 31 insertions(+), 705 deletions(-)
----------------------------------------------------------------------
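
Most of the deletions below drop commented-out Java carried over from the original Lucene test sources; the anonymous Analyzer/TokenStream subclasses in that Java had already been ported to named nested classes (TestGraphDupsAnalyzer, TestHighlightAnalyzingInfixSuggester, and so on), so the comments were dead weight. A minimal sketch of that named-nested-class pattern, assuming the Lucene.NET 4.8 test-framework API (illustrative only; exact member signatures may differ from the port at this commit):

    using System.IO;
    using Lucene.Net.Analysis;

    internal class TestGraphDupsAnalyzer : Analyzer
    {
        private readonly AnalyzingSuggesterTest outerInstance;

        public TestGraphDupsAnalyzer(AnalyzingSuggesterTest outerInstance)
        {
            // The Java anonymous class captured the enclosing test implicitly;
            // the port passes the enclosing instance in explicitly (see the
            // constructor fixes in this commit).
            this.outerInstance = outerInstance;
        }

        protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
        {
            // Same components the removed Java comments built inline.
            Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
            return new TokenStreamComponents(tokenizer);
        }
    }
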


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs b/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs
index 0011455..532c2a0 100644
--- a/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs
+++ b/src/Lucene.Net.Tests.Suggest/Spell/TestSpellChecker.cs
@@ -87,7 +87,7 @@ namespace Lucene.Net.Search.Spell
         public override void TearDown()
         {
             userindex.Dispose();
-            if (!spellChecker.IsDisposed) // LUCENENET TODO: Change this property to IsDisposed
+            if (!spellChecker.IsDisposed)
                 spellChecker.Dispose();
             spellindex.Dispose();
             base.TearDown();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
index 305f02f..dc12951 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingInfixSuggesterTest.cs
@@ -199,52 +199,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
             Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
             AnalyzingInfixSuggester suggester = new TestHighlightAnalyzingInfixSuggester(this, a);
-            //AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3) {
-
-            //        protected override object highlight(string text, ISet<string> matchedTokens, string prefixToken) 
-            //{
-            //    TokenStream ts = queryAnalyzer.tokenStream("text", new StringReader(text));
-            //          try {
-            //            CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
-            //            OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
-            //            ts.reset();
-            //            List<LookupHighlightFragment> fragments = new ArrayList<>();
-            //int upto = 0;
-            //            while (ts.incrementToken()) {
-            //              String token = termAtt.toString();
-            //int startOffset = offsetAtt.startOffset();
-            //int endOffset = offsetAtt.endOffset();
-            //              if (upto<startOffset) {
-            //                fragments.add(new LookupHighlightFragment(text.substring(upto, startOffset), false));
-            //                upto = startOffset;
-            //              } else if (upto > startOffset) {
-            //                continue;
-            //              }
-
-            //              if (matchedTokens.contains(token)) {
-            //                // Token matches.
-            //                fragments.add(new LookupHighlightFragment(text.substring(startOffset, endOffset), true));
-            //                upto = endOffset;
-            //              } else if (prefixToken != null && token.startsWith(prefixToken)) {
-            //                fragments.add(new LookupHighlightFragment(text.substring(startOffset, startOffset+prefixToken.length()), true));
-            //                if (prefixToken.length() < token.length()) {
-            //                  fragments.add(new LookupHighlightFragment(text.substring(startOffset+prefixToken.length(), startOffset+token.length()), false));
-            //                }
-            //                upto = endOffset;
-            //              }
-            //            }
-            //            ts.end();
-            //            int endOffset = offsetAtt.endOffset();
-            //            if (upto<endOffset) {
-            //              fragments.add(new LookupHighlightFragment(text.substring(upto), false));
-            //            }
-
-            //            return fragments;
-            //          } finally {
-            //            IOUtils.closeWhileHandlingException(ts);
-            //          }
-            //        }
-            //      };
+
             suggester.Build(new InputArrayIterator(keys));
 
             IList<Lookup.LookupResult> results = suggester.DoLookup(TestUtil.StringToCharSequence("ear", Random()).ToString(), 10, true, true);
@@ -415,15 +370,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             // Try again, but overriding addPrefixMatch to highlight
             // the entire hit:
             suggester = new TestHighlightChangeCaseAnalyzingInfixSuggester(this, a);
-            //    suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), a, a, 3) {
-            //        @Override
-            //        protected void addPrefixMatch(StringBuilder sb, String surface, String analyzed, String prefixToken)
-            //{
-            //    sb.append("<b>");
-            //    sb.append(surface);
-            //    sb.append("</b>");
-            //}
-            //      };
+
             suggester.Build(new InputArrayIterator(keys));
             results = suggester.DoLookup(TestUtil.StringToCharSequence("penn", Random()).ToString(), 10, true, true);
             assertEquals(1, results.size());
@@ -486,29 +433,8 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             CharArraySet stopWords = StopFilter.MakeStopSet(TEST_VERSION_CURRENT, "a");
             Analyzer indexAnalyzer = new TestSuggestStopFilterAnalyzer1(this, stopWords);
-            //    Analyzer indexAnalyzer = new Analyzer()
-            //{
-            //    @Override
-            //        protected TokenStreamComponents createComponents(String fieldName, Reader reader)
-            //{
-            //    MockTokenizer tokens = new MockTokenizer(reader);
-            //    return new TokenStreamComponents(tokens,
-            //                                     new StopFilter(TEST_VERSION_CURRENT, tokens, stopWords));
-            //}
-            //      };
-
             Analyzer queryAnalyzer = new TestSuggestStopFilterAnalyzer2(this, stopWords);
 
-            //    Analyzer queryAnalyzer = new Analyzer() {
-            //        @Override
-            //        protected TokenStreamComponents createComponents(String fieldName, Reader reader)
-            //{
-            //    MockTokenizer tokens = new MockTokenizer(reader);
-            //    return new TokenStreamComponents(tokens,
-            //                                     new SuggestStopFilter(tokens, stopWords));
-            //}
-            //      };
-
             AnalyzingInfixSuggester suggester = new AnalyzingInfixSuggester(TEST_VERSION_CURRENT, NewDirectory(), indexAnalyzer, queryAnalyzer, 3);
 
             Input[] keys = new Input[] {
@@ -943,12 +869,12 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 {
                     b.Append(' ');
                 }
-                String inputTerm = inputTerms[i];
+                string inputTerm = inputTerms[i];
                 //System.out.println("  inputTerm=" + inputTerm);
                 bool matched = false;
                 for (int j = 0; j < queryTerms.Length; j++)
                 {
-                    String queryTerm = queryTerms[j];
+                    string queryTerm = queryTerms[j];
                     //System.out.println("    queryTerm=" + queryTerm);
                     if (j < queryTerms.Length - 1 || lastPrefix == false)
                     {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
index f71836c..b80c81d 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
@@ -148,7 +148,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             for (int i = 0; i < howMany; i++)
             {
                 Document nextDoc = lineFile.NextDoc();
-                String title = nextDoc.GetField("title").StringValue;
+                string title = nextDoc.GetField("title").StringValue;
                 int randomWeight = Random().nextInt(100);
                 keys.Add(new Input(title, randomWeight));
                 if (!mapping.ContainsKey(title) || mapping[title] < randomWeight)
@@ -340,55 +340,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             Analyzer analyzer = new TestGraphDupsAnalyzer(this);
 
-            //   Analyzer analyzer = new Analyzer()
-            //{
-            //    @Override
-            //      protected TokenStreamComponents createComponents(String fieldName, Reader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-            //          int tokenStreamCounter = 0;
-            //    TokenStream[] tokenStreams = new TokenStream[] {
-            //            new CannedTokenStream(new Token[] {
-            //                token("wifi",1,1),
-            //                token("hotspot",0,2),
-            //                token("network",1,1),
-            //                token("is",1,1),
-            //                token("slow",1,1)
-            //              }),
-            //            new CannedTokenStream(new Token[] {
-            //                token("wi",1,1),
-            //                token("hotspot",0,3),
-            //                token("fi",1,1),
-            //                token("network",1,1),
-            //                token("is",1,1),
-            //                token("fast",1,1)
-
-            //              }),
-            //            new CannedTokenStream(new Token[] {
-            //                token("wifi",1,1),
-            //                token("hotspot",0,2),
-            //                token("network",1,1)
-            //              }),
-            //          };
-
-            //    @Override
-            //          public TokenStream getTokenStream()
-            //{
-            //    TokenStream result = tokenStreams[tokenStreamCounter];
-            //    tokenStreamCounter++;
-            //    return result;
-            //}
-
-            //@Override
-            //          protected void setReader(final Reader reader) 
-            //{
-            //}
-            //        };
-            //      }
-            //    };
-
             Input[] keys = new Input[] {
                 new Input("wifi network is slow", 50),
                 new Input("wi fi network is fast", 10),
@@ -432,7 +383,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             public TestInputPathRequiredTokenStreamComponents(AnalyzingSuggesterTest outerInstance, Tokenizer tokenizer)
                 : base(tokenizer)
             {
-
+                this.outerInstance = outerInstance;
             }
 
             public override TokenStream TokenStream
@@ -483,48 +434,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
             Analyzer analyzer = new TestInputPathRequiredAnalyzer(this);
 
-            //        Analyzer analyzer = new Analyzer()
-            //{
-            //    @Override
-            //      protected TokenStreamComponents createComponents(String fieldName, Reader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-            //          int tokenStreamCounter = 0;
-            //     TokenStream[] tokenStreams = new TokenStream[] {
-            //            new CannedTokenStream(new Token[] {
-            //                token("ab",1,1),
-            //                token("ba",0,1),
-            //                token("xc",1,1)
-            //              }),
-            //            new CannedTokenStream(new Token[] {
-            //                token("ba",1,1),
-            //                token("xd",1,1)
-            //              }),
-            //            new CannedTokenStream(new Token[] {
-            //                token("ab",1,1),
-            //                token("ba",0,1),
-            //                token("x",1,1)
-            //              })
-            //          };
-
-            //    @Override
-            //          public TokenStream getTokenStream()
-            //{
-            //    TokenStream result = tokenStreams[tokenStreamCounter];
-            //    tokenStreamCounter++;
-            //    return result;
-            //}
-
-            //@Override
-            //          protected void setReader(final Reader reader) throws IOException
-            //{
-            //}
-            //        };
-            //      }
-            //    };
-
             Input[] keys = new Input[] {
                 new Input("ab xc", 50),
                 new Input("ba xd", 50),
@@ -586,16 +495,16 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                     if (count++ != 3)
                     {
                         return new CannedTokenStream(new Token[] {
-                  NewToken("a", 1, 1),
-                });
+                            NewToken("a", 1, 1),
+                        });
                     }
                     else
                     {
                         // After that "a b":
                         return new CannedTokenStream(new Token[] {
-                  NewToken("a", 1, 1),
-                  NewToken("b", 1, 1),
-                });
+                            NewToken("a", 1, 1),
+                            NewToken("b", 1, 1),
+                        });
                     }
                 }
             }
@@ -625,44 +534,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         private Analyzer GetUnusualAnalyzer()
         {
             return new UsualAnalyzer(this);
-            //    return new Analyzer() {
-            //      @Override
-            //      protected TokenStreamComponents createComponents(String fieldName, Reader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-
-            //          int count;
-
-            //    @Override
-            //          public TokenStream getTokenStream()
-            //{
-            //    // 4th time we are called, return tokens a b,
-            //    // else just a:
-            //    if (count++ != 3)
-            //    {
-            //        return new CannedTokenStream(new Token[] {
-            //                  token("a", 1, 1),
-            //                });
-            //    }
-            //    else
-            //    {
-            //        // After that "a b":
-            //        return new CannedTokenStream(new Token[] {
-            //                  token("a", 1, 1),
-            //                  token("b", 1, 1),
-            //                });
-            //    }
-            //}
-
-            //@Override
-            //          protected void setReader(final Reader reader) throws IOException
-            //{
-            //}
-            //        };
-            //      }
-            //    };
         }
 
         [Test]
@@ -895,12 +766,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             public int Compare(TermFreq2 left, TermFreq2 right)
             {
-                // LUCENENET TODO: Work out how to cast long to float in the same way Java does
-                // http://stackoverflow.com/q/1293819/181087
-                //int cmp = Float.compare(right.weight, left.weight);
-                // LUCENENET NOTE: It shouldn't matter that the decimal place is not correct here,
-                // since all we care about is the relative difference between the numbers. Hopefully,
-                // the loss of precision is equivalent between Java and .NET...
                 int cmp = ((float)right.weight).CompareTo((float)left.weight);
                 if (cmp == 0)
                 {
@@ -993,7 +858,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                         }
                     }
 
-                    //analyzedKey = analyzedKey.replaceAll("(^|" + SEP + ")" + SEP + "$", "");
                     analyzedKey = Regex.Replace(analyzedKey, "(^|" + SEP + ")" + SEP + "$", "");
 
                     if (preserveSep && lastRemoved)
@@ -1037,7 +901,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 // Don't just sort original list, to avoid VERBOSE
                 // altering the test:
                 List<TermFreq2> sorted = new List<TermFreq2>(slowCompletor);
-                //Collections.sort(sorted);
                 sorted.Sort();
                 foreach (TermFreq2 ent in sorted)
                 {
@@ -1105,7 +968,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 // issue open for this):
                 while (true)
                 {
-                    //string s = analyzedKey.replaceAll(SEP + "$", "");
                     string s = Regex.Replace(analyzedKey, SEP + "$", "");
                     if (s.equals(analyzedKey))
                     {
@@ -1145,22 +1007,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 if (matches.size() > 1)
                 {
                     matches.Sort(new TestRandomComparator());
-
-                    //        Collections.sort(matches, new Comparator<TermFreq2>() {
-                    //            @Override
-                    //            public int compare(TermFreq2 left, TermFreq2 right)
-                    //{
-                    //    int cmp = Float.compare(right.weight, left.weight);
-                    //    if (cmp == 0)
-                    //    {
-                    //        return left.analyzedForm.compareTo(right.analyzedForm);
-                    //    }
-                    //    else
-                    //    {
-                    //        return cmp;
-                    //    }
-                    //}
-                    //          });
                 }
 
                 if (matches.size() > topN)
@@ -1327,33 +1173,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             Analyzer a = new TestDupSurfaceFormsMissingResultsAnalyzer(this);
 
-            //    Analyzer a = new Analyzer()
-            //{
-            //    @Override
-            //      protected TokenStreamComponents createComponents(String fieldName, TextReader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-
-            //          @Override
-            //          public TokenStream getTokenStream()
-            //{
-            //    return new CannedTokenStream(new Token[] {
-            //                token("hairy", 1, 1),
-            //                token("smelly", 0, 1),
-            //                token("dog", 1, 1),
-            //              });
-            //}
-
-            //@Override
-            //          protected void setReader(Reader reader) 
-            //{
-            //}
-            //        };
-            //      }
-            //    };
-
             AnalyzingSuggester suggester = new AnalyzingSuggester(a, a, 0, 256, -1, true);
 
             suggester.Build(new InputArrayIterator(Shuffle(
@@ -1440,46 +1259,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             Analyzer a = new TestDupSurfaceFormsMissingResults2Analyzer();
 
-            //    Analyzer a = new Analyzer()
-            //{
-            //    @Override
-            //      protected TokenStreamComponents createComponents(string fieldName, TextReader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-
-            //          int count;
-
-            //    @Override
-            //          public TokenStream getTokenStream()
-            //{
-            //    if (count == 0)
-            //    {
-            //        count++;
-            //        return new CannedTokenStream(new Token[] {
-            //                  token("p", 1, 1),
-            //                  token("q", 1, 1),
-            //                  token("r", 0, 1),
-            //                  token("s", 0, 1),
-            //                });
-            //    }
-            //    else
-            //    {
-            //        return new CannedTokenStream(new Token[] {
-            //                  token("p", 1, 1),
-            //                });
-            //    }
-            //}
-
-            //@Override
-            //          protected void setReader(TextReader reader) 
-            //{
-            //}
-            //        };
-            //      }
-            //    };
-
             AnalyzingSuggester suggester = new AnalyzingSuggester(a, a, 0, 256, -1, true);
 
             suggester.Build(new InputArrayIterator(new Input[] {
@@ -1569,45 +1348,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         public void Test0ByteKeys()
         {
             Analyzer a = new Test0ByteKeysAnalyzer();
-            //    Analyzer a = new Analyzer()
-            //{
-            //    @Override
-            //        protected TokenStreamComponents createComponents(String fieldName, TextReader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-            //            int tokenStreamCounter = 0;
-            //     TokenStream[] tokenStreams = new TokenStream[] {
-            //              new CannedBinaryTokenStream(new BinaryToken[] {
-            //                  token(new BytesRef(new byte[] {0x0, 0x0, 0x0})),
-            //                }),
-            //              new CannedBinaryTokenStream(new BinaryToken[] {
-            //                  token(new BytesRef(new byte[] {0x0, 0x0})),
-            //                }),
-            //              new CannedBinaryTokenStream(new BinaryToken[] {
-            //                  token(new BytesRef(new byte[] {0x0, 0x0, 0x0})),
-            //                }),
-            //              new CannedBinaryTokenStream(new BinaryToken[] {
-            //                  token(new BytesRef(new byte[] {0x0, 0x0})),
-            //                }),
-            //            };
-
-            //    @Override
-            //            public TokenStream getTokenStream()
-            //{
-            //    TokenStream result = tokenStreams[tokenStreamCounter];
-            //    tokenStreamCounter++;
-            //    return result;
-            //}
-
-            //@Override
-            //            protected void setReader(final Reader reader) throws IOException
-            //{
-            //}
-            //          };
-            //        }
-            //      };
 
             AnalyzingSuggester suggester = new AnalyzingSuggester(a, a, 0, 256, -1, true);
 
@@ -1725,7 +1465,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             {
                 asList.Add(value);
             }
-            //Collections.shuffle(asList, random());
             asList = CollectionsHelper.Shuffle(asList);
             return asList;
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
index 576f09d..38dca2c 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
@@ -287,54 +287,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         public void TestGraphDups()
         {
             Analyzer analyzer = new TestGraphDupsAnalyzer(this);
-            //                Analyzer analyzer = new Analyzer()
-            //{
-            //    @Override
-            //      protected TokenStreamComponents createComponents(String fieldName, Reader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-            //          int tokenStreamCounter = 0;
-            //    final TokenStream[] tokenStreams = new TokenStream[] {
-            //            new CannedTokenStream(new Token[] {
-            //                token("wifi",1,1),
-            //                token("hotspot",0,2),
-            //                token("network",1,1),
-            //                token("is",1,1),
-            //                token("slow",1,1)
-            //              }),
-            //            new CannedTokenStream(new Token[] {
-            //                token("wi",1,1),
-            //                token("hotspot",0,3),
-            //                token("fi",1,1),
-            //                token("network",1,1),
-            //                token("is",1,1),
-            //                token("fast",1,1)
-
-            //              }),
-            //            new CannedTokenStream(new Token[] {
-            //                token("wifi",1,1),
-            //                token("hotspot",0,2),
-            //                token("network",1,1)
-            //              }),
-            //          };
-
-            //    @Override
-            //          public TokenStream getTokenStream()
-            //{
-            //    TokenStream result = tokenStreams[tokenStreamCounter];
-            //    tokenStreamCounter++;
-            //    return result;
-            //}
-
-            //@Override
-            //          protected void setReader(final Reader reader) 
-            //{
-            //}
-            //        };
-            //      }
-            //    };
 
             Input[] keys = new Input[] {
                 new Input("wifi network is slow", 50),
@@ -389,7 +341,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             public TestInputPathRequiredTokenStreamComponents(FuzzySuggesterTest outerInstance, Tokenizer tokenizer)
                 : base(tokenizer)
             {
-
+                this.outerInstance = outerInstance;
             }
 
             public override TokenStream TokenStream
@@ -439,47 +391,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             //  synonym module 
 
             Analyzer analyzer = new TestInputPathRequiredAnalyzer(this);
-            //    Analyzer analyzer = new Analyzer()
-            //{
-            //    @Override
-            //      protected TokenStreamComponents createComponents(String fieldName, Reader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-            //          int tokenStreamCounter = 0;
-            //    final TokenStream[] tokenStreams = new TokenStream[] {
-            //            new CannedTokenStream(new Token[] {
-            //                token("ab",1,1),
-            //                token("ba",0,1),
-            //                token("xc",1,1)
-            //              }),
-            //            new CannedTokenStream(new Token[] {
-            //                token("ba",1,1),
-            //                token("xd",1,1)
-            //              }),
-            //            new CannedTokenStream(new Token[] {
-            //                token("ab",1,1),
-            //                token("ba",0,1),
-            //                token("x",1,1)
-            //              })
-            //          };
-
-            //    @Override
-            //          public TokenStream getTokenStream()
-            //{
-            //    TokenStream result = tokenStreams[tokenStreamCounter];
-            //    tokenStreamCounter++;
-            //    return result;
-            //}
-
-            //@Override
-            //          protected void setReader(final Reader reader) 
-            //{
-            //}
-            //        };
-            //      }
-            //    };
 
             Input[] keys = new Input[] {
                 new Input("ab xc", 50),
@@ -576,44 +487,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         private Analyzer GetUnusualAnalyzer()
         {
             return new UsualAnalyzer(this);
-            //    return new Analyzer() {
-            //      @Override
-            //      protected TokenStreamComponents createComponents(String fieldName, Reader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-
-            //    return new TokenStreamComponents(tokenizer) {
-
-            //          int count;
-
-            //    @Override
-            //          public TokenStream getTokenStream()
-            //{
-            //    // 4th time we are called, return tokens a b,
-            //    // else just a:
-            //    if (count++ != 3)
-            //    {
-            //        return new CannedTokenStream(new Token[] {
-            //                  token("a", 1, 1),
-            //                });
-            //    }
-            //    else
-            //    {
-            //        // After that "a b":
-            //        return new CannedTokenStream(new Token[] {
-            //                  token("a", 1, 1),
-            //                  token("b", 1, 1),
-            //                });
-            //    }
-            //}
-
-            //@Override
-            //          protected void setReader(final Reader reader) 
-            //{
-            //}
-            //        };
-            //      }
-            //    };
         }
 
         [Test]
@@ -835,12 +708,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             public int Compare(Lookup.LookupResult left, Lookup.LookupResult right)
             {
-                // LUCENENET TODO: Work out how to cast long to float in the same way Java does
-                // http://stackoverflow.com/q/1293819/181087
-                //int cmp = Float.compare(right.weight, left.weight);
-                // LUCENENET NOTE: It shouldn't matter that the decimal place is not correct here,
-                // since all we care about is the relative difference between the numbers. Hopefully,
-                // the loss of precision is equivalent between Java and .NET...
                 int cmp = ((float)right.value).CompareTo((float)left.value);
                 if (cmp == 0)
                 {
@@ -923,7 +790,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                         }
                     }
 
-                    //analyzedKey = analyzedKey.replaceAll("(^| )\u0000$", "");
                     analyzedKey = Regex.Replace(analyzedKey, "(^| )\u0000$", "");
 
                     if (preserveSep && lastRemoved)
@@ -955,7 +821,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 // Don't just sort original list, to avoid VERBOSE
                 // altering the test:
                 List<TermFreqPayload2> sorted = new List<TermFreqPayload2>(slowCompletor);
-                //Collections.sort(sorted);
                 sorted.Sort();
                 foreach (TermFreqPayload2 ent in sorted)
                 {
@@ -1016,9 +881,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 // issue open for this):
                 while (true)
                 {
-                    //String s = analyzedKey.replaceAll("(^| )\u0000$", "");
                     string s = Regex.Replace(analyzedKey, "(^| )\u0000$", "");
-                    //s = s.replaceAll("\\s+$", "");
                     s = Regex.Replace(s, "\\s+$", "");
                     if (s.Equals(analyzedKey))
                     {
@@ -1088,23 +951,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
                 if (matches.size() > 1)
                 {
-
                     matches.Sort(new TestRandomComparator());
-                    //        Collections.sort(matches, new Comparator<Lookup.LookupResult>() {
-                    //            @Override
-                    //            public int compare(Lookup.LookupResult left, Lookup.LookupResult right)
-                    //{
-                    //    int cmp = Float.compare(right.value, left.value);
-                    //    if (cmp == 0)
-                    //    {
-                    //        return left.compareTo(right);
-                    //    }
-                    //    else
-                    //    {
-                    //        return cmp;
-                    //    }
-                    //}
-                    //          });
                 }
 
                 if (matches.size() > topN)
@@ -1150,7 +997,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 new Input(" a", 60),
             });
 
-            //Collections.shuffle(keys, Random());
             keys = CollectionsHelper.Shuffle(keys);
             suggester.Build(new InputArrayIterator(keys));
 
@@ -1175,7 +1021,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 new Input("barbazfoo", 10),
             });
 
-            //Collections.shuffle(keys, Random());
             keys = CollectionsHelper.Shuffle(keys);
             suggester.Build(new InputArrayIterator(keys));
 
@@ -1186,7 +1031,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         }
 
 
-        //@SuppressWarnings("fallthrough")
         private static string AddRandomEdit(string @string, int prefixLength)
         {
             char[] input = @string.ToCharArray();
@@ -1290,13 +1134,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
             answers.Sort(new TestRandom2Comparator());
 
-            //    Collections.sort(answers, new Comparator<Input>() {
-            //        @Override
-            //        public int compare(Input a, Input b)
-            //{
-            //    return a.term.compareTo(b.term);
-            //}
-            //      });
             if (VERBOSE)
             {
                 Console.WriteLine("\nTEST: targets");
@@ -1319,7 +1156,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                 Console.WriteLine("TEST: maxEdits=" + maxEdits + " prefixLen=" + prefixLen + " transpositions=" + transpositions + " num=" + NUM);
             }
 
-            //Collections.shuffle(answers, Random());
             answers = new List<Input>(CollectionsHelper.Shuffle(answers));
             suggest.Build(new InputArrayIterator(answers.ToArray()));
 
@@ -1350,7 +1186,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                     }
                 }
 
-                //Collections.sort(actual, new CompareByCostThenAlpha());
                 actual.Sort(new CompareByCostThenAlpha());
 
                 int limit = Math.Min(expected.size(), actual.size());
@@ -1435,7 +1270,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                     }
                 }
 
-                //Collections.sort(results, new CompareByCostThenAlpha());
                 results.Sort(new CompareByCostThenAlpha());
             }
 
@@ -1513,7 +1347,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             otherPoints = ToIntsRef(other);
             n = targetPoints.Length;
             int m = otherPoints.Length;
-            //d = new int[n + 1][m + 1];
+
             d = ReturnRectangularIntArray(n + 1, m + 1);
 
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
index 7add802..6781882 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/TestFreeTextSuggester.cs
@@ -275,16 +275,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             // Just deletes "of"
             Analyzer a = new TestEndingHoleAnalyzer();
-            //    Analyzer a = new Analyzer()
-            //{
-            //    @Override
-            //        public TokenStreamComponents createComponents(string field, Reader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader);
-            //    CharArraySet stopSet = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "of");
-            //    return new TokenStreamComponents(tokenizer, new StopFilter(TEST_VERSION_CURRENT, tokenizer, stopSet));
-            //}
-            //      };
 
             IEnumerable<Input> keys = AnalyzingSuggesterTest.Shuffle(
                 new Input("wizard of oz", 50)
@@ -310,16 +300,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             // Just deletes "of"
             Analyzer a = new TestEndingHoleAnalyzer();
-            //    Analyzer a = new Analyzer()
-            //{
-            //    @Override
-            //        public TokenStreamComponents createComponents(String field, Reader reader)
-            //{
-            //    Tokenizer tokenizer = new MockTokenizer(reader);
-            //    CharArraySet stopSet = StopFilter.makeStopSet(TEST_VERSION_CURRENT, "of");
-            //    return new TokenStreamComponents(tokenizer, new StopFilter(TEST_VERSION_CURRENT, tokenizer, stopSet));
-            //}
-            //      };
 
             IEnumerable<Input> keys = AnalyzingSuggesterTest.Shuffle(
                 new Input("wizard of of oz", 50)
@@ -351,25 +331,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         }
 
         private static IComparer<Lookup.LookupResult> byScoreThenKey = new ByScoreThenKeyComparator();
-        //  private static IComparer<Lookup.LookupResult> byScoreThenKey = new IComparer<Lookup.LookupResult>() {
-        //    @Override
-        //    public int compare(Lookup.LookupResult a, Lookup.LookupResult b)
-        //{
-        //    if (a.value > b.value)
-        //    {
-        //        return -1;
-        //    }
-        //    else if (a.value < b.value)
-        //    {
-        //        return 1;
-        //    }
-        //    else
-        //    {
-        //        // Tie break by UTF16 sort order:
-        //        return ((String)a.key).compareTo((String)b.key);
-        //    }
-        //}
-        //  };
 
         internal class TestRandomInputIterator : InputIterator
         {
@@ -504,66 +465,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             // Build suggester model:
             FreeTextSuggester sug = new FreeTextSuggester(a, a, grams, (byte)0x20);
             sug.Build(new TestRandomInputIterator(this, docs));
-            //sug.Build(new InputIterator()
-            //{
-            //    int upto;
-
-            //    @Override
-            //        public Comparator<BytesRef> getComparator()
-            //{
-            //    return null;
-            //}
-
-            //@Override
-            //        public BytesRef next()
-            //{
-            //    if (upto == docs.length)
-            //    {
-            //        return null;
-            //    }
-            //    else
-            //    {
-            //        StringBuilder b = new StringBuilder();
-            //        for (String token : docs[upto])
-            //        {
-            //            b.append(' ');
-            //            b.append(token);
-            //        }
-            //        upto++;
-            //        return new BytesRef(b.toString());
-            //    }
-            //}
-
-            //@Override
-            //        public long weight()
-            //{
-            //    return Random().nextLong();
-            //}
-
-            //@Override
-            //        public BytesRef payload()
-            //{
-            //    return null;
-            //}
-
-            //@Override
-            //        public boolean hasPayloads()
-            //{
-            //    return false;
-            //}
-
-            //@Override
-            //        public Set<BytesRef> contexts()
-            //{
-            //    return null;
-            //}
-
-            //@Override
-            //        public boolean hasContexts()
-            //{
-            //    return false;
-            //}
-            //      });
 
             // Build inefficient but hopefully correct model:
             List<IDictionary<string, int?>> gramCounts = new List<IDictionary<string, int?>>(grams);
@@ -763,7 +664,6 @@ namespace Lucene.Net.Search.Suggest.Analyzing
 
                     // Second pass, trim to only top N, and fold those
                     // into overall suggestions:
-                    //Collections.sort(tmp, byScoreThenKey);
                     tmp.Sort(byScoreThenKey);
                     if (tmp.size() > num)
                     {
@@ -797,12 +697,10 @@ namespace Lucene.Net.Search.Suggest.Analyzing
                     backoff *= FreeTextSuggester.ALPHA;
                 }
 
-                //Collections.sort(expected, byScoreThenKey);
                 expected.Sort(byScoreThenKey);
 
                 if (expected.size() > num)
                 {
-                    //expected.subList(num, expected.size()).clear();
                     expected.RemoveRange(num, expected.size() - num);
                 }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs
index 7417881..08e0021 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/DocumentDictionaryTest.cs
@@ -168,7 +168,6 @@ namespace Lucene.Net.Search.Suggest
                 var invalid = docs[invalidTerm];
                 docs.Remove(invalidTerm);
                 assertNotNull(invalid);
-                //assertNotNull(docs.remove(invalidTerm));
             }
             assertTrue(!docs.Any());
 
@@ -201,7 +200,6 @@ namespace Lucene.Net.Search.Suggest
                 var field = f.Utf8ToString();
                 Document doc = docs.ContainsKey(field) ? docs[field] : null;
                 docs.Remove(field);
-                //Document doc = docs.remove(f.Utf8ToString());
                 assertTrue(f.equals(new BytesRef(doc.Get(FIELD_NAME))));
                 IndexableField weightField = doc.GetField(WEIGHT_FIELD_NAME);
                 assertEquals(inputIterator.Weight, (weightField != null) ? Convert.ToInt64(weightField.NumericValue) : 0);
@@ -213,7 +211,6 @@ namespace Lucene.Net.Search.Suggest
                 var invalid = docs[invalidTerm];
                 docs.Remove(invalidTerm);
                 assertNotNull(invalid);
-                //assertNotNull(docs.remove(invalidTerm));
             }
 
 
@@ -267,7 +264,6 @@ namespace Lucene.Net.Search.Suggest
                 var invalid = docs[invalidTerm];
                 docs.Remove(invalidTerm);
                 assertNotNull(invalid);
-                //assertNotNull(docs.remove(invalidTerm));
             }
             assertTrue(!docs.Any());
 
@@ -316,7 +312,6 @@ namespace Lucene.Net.Search.Suggest
                 var toDel = docs[termToDel];
                 assertTrue(toDel != null);
                 docs.Remove(termToDel);
-                //assertTrue(null!=docs.remove(termToDel));
             }
 
             IndexReader ir = DirectoryReader.Open(dir);
@@ -329,7 +324,6 @@ namespace Lucene.Net.Search.Suggest
                 var field = f.Utf8ToString();
                 Document doc = docs.ContainsKey(field) ? docs[field] : null;
                 docs.Remove(field);
-                //Document doc = docs.remove(f.utf8ToString());
                 assertTrue(f.equals(new BytesRef(doc.Get(FIELD_NAME))));
                 IndexableField weightField = doc.GetField(WEIGHT_FIELD_NAME);
                 assertEquals(inputIterator.Weight, (weightField != null) ? Convert.ToInt64(weightField.NumericValue) : 0);
@@ -341,7 +335,6 @@ namespace Lucene.Net.Search.Suggest
                 var invalid = docs[invalidTerm];
                 docs.Remove(invalidTerm);
                 assertNotNull(invalid);
-                //assertNotNull(docs.remove(invalidTerm));
             }
             assertTrue(!docs.Any());
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs
index 7a9a837..66d906c 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/DocumentValueSourceDictionaryTest.cs
@@ -153,7 +153,6 @@ namespace Lucene.Net.Search.Suggest
                 string field = f.Utf8ToString();
                 Document doc = docs.ContainsKey(field) ? docs[field] : null;
                 docs.Remove(field);
-                //Document doc = docs.remove(f.utf8ToString());
                 long w1 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_1).NumericValue);
                 long w2 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_2).NumericValue);
                 long w3 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_3).NumericValue);
@@ -197,7 +196,6 @@ namespace Lucene.Net.Search.Suggest
                 string field = f.Utf8ToString();
                 Document doc = docs.ContainsKey(field) ? docs[field] : null;
                 docs.Remove(field);
-                //Document doc = docs.remove(f.utf8ToString());
                 long w1 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_1).NumericValue);
                 long w2 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_2).NumericValue);
                 long w3 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_3).NumericValue);
@@ -248,7 +246,6 @@ namespace Lucene.Net.Search.Suggest
                 var toDel = docs[termToDel];
                 docs.Remove(termToDel);
                 assertTrue(null != toDel);
-                //assertTrue(null!=docs.remove(termToDel));
             }
 
             IndexReader ir = DirectoryReader.Open(dir);
@@ -264,7 +261,6 @@ namespace Lucene.Net.Search.Suggest
                 string field = f.Utf8ToString();
                 Document doc = docs.ContainsKey(field) ? docs[field] : null;
                 docs.Remove(field);
-                //Document doc = docs.remove(f.utf8ToString());
                 long w1 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_1).NumericValue);
                 long w2 = Convert.ToInt64(doc.GetField(WEIGHT_FIELD_NAME_2).NumericValue);
                 assertTrue(f.equals(new BytesRef(doc.Get(FIELD_NAME))));
@@ -301,7 +297,6 @@ namespace Lucene.Net.Search.Suggest
                 string field = f.Utf8ToString();
                 Document doc = docs.ContainsKey(field) ? docs[field] : null;
                 docs.Remove(field);
-                //Document doc = docs.remove(f.utf8ToString());
                 assertTrue(f.equals(new BytesRef(doc.Get(FIELD_NAME))));
                 assertEquals(inputIterator.Weight, 10);
                 assertTrue(inputIterator.Payload.equals(doc.GetField(PAYLOAD_FIELD_NAME).BinaryValue));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs
index 96b3e25..d6482e8 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/FileDictionaryTest.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Search.Suggest
         {
             List<string> entryValues = new List<string>();
             StringBuilder sb = new StringBuilder();
-            String term = TestUtil.RandomSimpleString(Random(), 1, 300);
+            string term = TestUtil.RandomSimpleString(Random(), 1, 300);
             sb.append(term);
             entryValues.Add(term);
             if (hasWeight)
@@ -44,7 +44,7 @@ namespace Lucene.Net.Search.Suggest
             if (hasPayload)
             {
                 sb.append(fieldDelimiter);
-                String payload = TestUtil.RandomSimpleString(Random(), 1, 300);
+                string payload = TestUtil.RandomSimpleString(Random(), 1, 300);
                 sb.append(payload);
                 entryValues.Add(payload);
             }
@@ -52,7 +52,7 @@ namespace Lucene.Net.Search.Suggest
             return new KeyValuePair<List<string>, string>(entryValues, sb.toString());
         }
 
-        private KeyValuePair<List<List<String>>, String> generateFileInput(int count, String fieldDelimiter, bool hasWeights, bool hasPayloads)
+        private KeyValuePair<List<List<string>>, string> generateFileInput(int count, string fieldDelimiter, bool hasWeights, bool hasPayloads)
         {
             List<List<string>> entries = new List<List<string>>();
             StringBuilder sb = new StringBuilder();
@@ -121,10 +121,10 @@ namespace Lucene.Net.Search.Suggest
         [Test]
         public void TestFileWithWeightAndPayload()
         {
-            KeyValuePair<List<List<String>>, String> fileInput = generateFileInput(AtLeast(100), FileDictionary.DEFAULT_FIELD_DELIMITER, true, true);
+            KeyValuePair<List<List<string>>, string> fileInput = generateFileInput(AtLeast(100), FileDictionary.DEFAULT_FIELD_DELIMITER, true, true);
             Stream inputReader = new MemoryStream(fileInput.Value.getBytes(Encoding.UTF8));
             FileDictionary dictionary = new FileDictionary(inputReader);
-            List<List<String>> entries = fileInput.Key;
+            List<List<string>> entries = fileInput.Key;
             InputIterator inputIter = dictionary.EntryIterator;
             assertTrue(inputIter.HasPayloads);
             BytesRef term;
@@ -132,7 +132,7 @@ namespace Lucene.Net.Search.Suggest
             while ((term = inputIter.Next()) != null)
             {
                 assertTrue(entries.size() > count);
-                List<String> entry = entries[count];
+                List<string> entry = entries[count];
                 assertTrue(entry.size() >= 2); // at least term and weight
                 assertEquals(entry[0], term.Utf8ToString());
                 assertEquals(long.Parse(entry[1], CultureInfo.InvariantCulture), inputIter.Weight);
@@ -155,7 +155,7 @@ namespace Lucene.Net.Search.Suggest
             KeyValuePair<List<List<string>>, string> fileInput = generateFileInput(1, FileDictionary.DEFAULT_FIELD_DELIMITER, true, true);
             Stream inputReader = new MemoryStream(fileInput.Value.getBytes(Encoding.UTF8));
             FileDictionary dictionary = new FileDictionary(inputReader);
-            List<List<String>> entries = fileInput.Key;
+            List<List<string>> entries = fileInput.Key;
             InputIterator inputIter = dictionary.EntryIterator;
             assertTrue(inputIter.HasPayloads);
             BytesRef term;
@@ -163,7 +163,7 @@ namespace Lucene.Net.Search.Suggest
             while ((term = inputIter.Next()) != null)
             {
                 assertTrue(entries.size() > count);
-                List<String> entry = entries[count];
+                List<string> entry = entries[count];
                 assertTrue(entry.size() >= 2); // at least term and weight
                 assertEquals(entry[0], term.Utf8ToString());
                 assertEquals(long.Parse(entry[1], CultureInfo.InvariantCulture), inputIter.Weight);
@@ -187,7 +187,7 @@ namespace Lucene.Net.Search.Suggest
             KeyValuePair<List<List<string>>, string> fileInput = generateFileInput(AtLeast(100), " , ", true, true);
             Stream inputReader = new MemoryStream(fileInput.Value.getBytes(Encoding.UTF8));
             FileDictionary dictionary = new FileDictionary(inputReader, " , ");
-            List<List<String>> entries = fileInput.Key;
+            List<List<string>> entries = fileInput.Key;
             InputIterator inputIter = dictionary.EntryIterator;
             assertTrue(inputIter.HasPayloads);
             BytesRef term;
@@ -195,7 +195,7 @@ namespace Lucene.Net.Search.Suggest
             while ((term = inputIter.Next()) != null)
             {
                 assertTrue(entries.size() > count);
-                List<String> entry = entries[count];
+                List<string> entry = entries[count];
                 assertTrue(entry.size() >= 2); // at least term and weight
                 assertEquals(entry[0], term.Utf8ToString());
                 assertEquals(long.Parse(entry[1]), inputIter.Weight);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
index a54bc85..00c4fbc 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/FSTCompletionTest.cs
@@ -276,7 +276,6 @@ namespace Lucene.Net.Search.Suggest.Fst
                 int colLen = Math.Max(MaxLen(expected), MaxLen(result));
 
                 StringBuilder b = new StringBuilder();
-                //string format = "%" + colLen + "s  " + "%" + colLen + "s\n";
                 string format = "{0," + colLen + "}  {1," + colLen + "}\n";
                 b.append(string.Format(CultureInfo.InvariantCulture, format, "Expected", "Result"));
                 for (int i = 0; i < Math.Max(result.Length, expected.Length); i++)
@@ -296,7 +295,6 @@ namespace Lucene.Net.Search.Suggest.Fst
             string[] result = new string[expected.Length];
             for (int i = 0; i < result.Length; i++)
             {
-                //result[i] = expected[i].replaceAll("\\/[0-9\\.]+", "");
                 result[i] = Regex.Replace(expected[i], "\\/[0-9\\.]+", "");
             }
             return result;
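
Both hunks above swap Java string idioms for their .NET counterparts: a printf-style width specifier becomes a composite-format alignment component, and String.replaceAll becomes a static Regex.Replace call. A small illustrative comparison, with made-up values:

    using System;
    using System.Text.RegularExpressions;

    int colLen = 12;

    // Java: String.format("%" + colLen + "s", "Expected")  -> right-aligned, width 12.
    // .NET composite formatting expresses the width as an alignment component.
    string padded = string.Format("{0," + colLen + "}", "Expected");

    // Java: "foo/0.25".replaceAll("\\/[0-9\\.]+", "")
    string stripped = Regex.Replace("foo/0.25", "\\/[0-9\\.]+", "");

    Console.WriteLine("[" + padded + "] " + stripped); // [    Expected] foo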

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs
index 5a5156b..59a58a5 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Fst/WFSTCompletionTest.cs
@@ -1,6 +1,4 @@
-using Lucene.Net.Search.Suggest;
-using Lucene.Net.Search.Suggest.Fst;
-using Lucene.Net.Support;
+using Lucene.Net.Support;
 using Lucene.Net.Util;
 using NUnit.Framework;
 using System;
@@ -194,21 +192,6 @@ namespace Lucene.Net.Search.Suggest.Fst
                 assertTrue(matches.size() > 0);
                 matches.Sort(new TestRandomComparer());
 
-                //      Collections.Sort(matches, new Comparator<Lookup.LookupResult>() {
-                //        @Override
-                //        public int compare(Lookup.LookupResult left, Lookup.LookupResult right)
-                //{
-                //    int cmp = Float.compare(right.value, left.value);
-                //    if (cmp == 0)
-                //    {
-                //        return left.compareTo(right);
-                //    }
-                //    else
-                //    {
-                //        return cmp;
-                //    }
-                //}
-                //      });
                 if (matches.size() > topN)
                 {
                     //matches.SubList(topN, matches.size()).clear();
@@ -230,12 +213,6 @@ namespace Lucene.Net.Search.Suggest.Fst
         {
             public int Compare(Lookup.LookupResult left, Lookup.LookupResult right)
             {
-                // LUCENENET TODO: Work out how to cast long to float in the same way Java does
-                // http://stackoverflow.com/q/1293819/181087
-                //int cmp = Float.compare(right.value, left.value);
-                // LUCENENET NOTE: It shouldn't matter that the decimal place is not correct here,
-                // since all we care about is the relative difference between the numbers. Hopefully,
-                // the loss of precision is equivalent between Java and .NET...
                 int cmp = ((float)right.value).CompareTo((float)left.value);
                 if (cmp == 0)
                 {
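
The comparer keeps the Java semantics: order by value descending, compared as float, and fall back to the results' natural ordering as a tiebreak. A self-contained sketch of the same pattern using a hypothetical result type rather than the Lookup.LookupResult API:

    using System;
    using System.Collections.Generic;

    // Hypothetical stand-in for a lookup result: a key plus a long weight.
    class Result : IComparable<Result>
    {
        public string Key;
        public long Value;
        public int CompareTo(Result other) => string.CompareOrdinal(Key, other.Key);
    }

    class ByValueDescending : IComparer<Result>
    {
        public int Compare(Result left, Result right)
        {
            // Compare the weights as floats (mirroring Java's Float.compare),
            // highest weight first; break ties with the natural key order.
            int cmp = ((float)right.Value).CompareTo((float)left.Value);
            return cmp != 0 ? cmp : left.CompareTo(right);
        }
    }

    // Usage: results.Sort(new ByValueDescending());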

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
index 58af7f9..2c9682d 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/LookupBenchmarkTest.cs
@@ -35,7 +35,6 @@ namespace Lucene.Net.Search.Suggest
      * limitations under the License.
      */
 
-    // LUCENENET TODO: Run these tests
     [Ignore("COMMENT ME TO RUN BENCHMARKS!")]
     public class LookupBenchmarkTest : LuceneTestCase
     {
@@ -76,11 +75,9 @@ namespace Lucene.Net.Search.Suggest
             Debug.Assert(false, "disable assertions before running benchmarks!");
             IList<Input> input = ReadTop50KWiki();
             input = CollectionsHelper.Shuffle(input);
-            //Collections.Shuffle(input, random);
-            LookupBenchmarkTest.dictionaryInput = input.ToArray();
+            dictionaryInput = input.ToArray();
             input = CollectionsHelper.Shuffle(input);
-            //Collections.shuffle(input, random);
-            LookupBenchmarkTest.benchmarkInput = input;
+            benchmarkInput = input;
         }
 
         static readonly Encoding UTF_8 = Encoding.UTF8;
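
CollectionsHelper.Shuffle stands in for Java's Collections.shuffle in the setup above. If the same behaviour were needed without that helper, a plain Fisher-Yates shuffle would do the job; this is only a sketch with hypothetical names, not the port's implementation:

    using System;
    using System.Collections.Generic;

    static class ShuffleHelper
    {
        // Fisher-Yates: walk from the end, swapping each slot with a random earlier-or-equal one.
        internal static void Shuffle<T>(IList<T> list, Random random)
        {
            for (int i = list.Count - 1; i > 0; i--)
            {
                int j = random.Next(i + 1);
                T tmp = list[i];
                list[i] = list[j];
                list[j] = tmp;
            }
        }
    }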
@@ -122,17 +119,8 @@ namespace Lucene.Net.Search.Suggest
             {
                 BenchmarkResult result = Measure(new CallableIntHelper(this, cls));
 
-                //        BenchmarkResult result = measure(new ICallable<int>() {
-                //        @Override
-                //        public Integer call() 
-                //{
-                //    Lookup lookup = buildLookup(cls, dictionaryInput);          
-                //          return lookup.hashCode();
-                //}
-                //      });
-
                 Console.WriteLine(
-                    string.Format(CultureInfo.InvariantCulture, "{0:000000000000000}s input: {1}, time[ms]: {2}" /*"%-15s input: %d, time[ms]: %s"*/,
+                    string.Format(CultureInfo.InvariantCulture, "{0,15}s input: {1}, time[ms]: {2}" /*"%-15s input: %d, time[ms]: %s"*/,
                         cls.Name,
                         dictionaryInput.Length,
                         result.average.ToString()));
@@ -167,7 +155,7 @@ namespace Lucene.Net.Search.Suggest
                 Lookup lookup = BuildLookup(cls, dictionaryInput);
                 long sizeInBytes = lookup.SizeInBytes();
                 Console.WriteLine(
-            string.Format(CultureInfo.InvariantCulture, "{0:000000000000000}s size[B]:{1:#,##0}" /*"%-15s size[B]:%,13d"*/,
+            string.Format(CultureInfo.InvariantCulture, "{0,15}s size[B]:{1:#,##0}" /*"%-15s size[B]:%,13d"*/,
                 lookup.GetType().Name,
                 sizeInBytes));
             }
@@ -259,30 +247,10 @@ namespace Lucene.Net.Search.Suggest
                     input.Add(sub);
                 }
 
-                BenchmarkResult result = null;// = Measure(new PerformanceTestCallableIntHelper(this, input, lookup));
-
-                try
-                {
-                    result = Measure(new PerformanceTestCallableIntHelper(this, input, lookup));
-                }
-                catch (Exception e)
-                {
-                    string foo = "";
-                }
-
-                //        BenchmarkResult result = measure(new Callable<Integer>() {
-                //        public Integer call() 
-                //{
-                //          int v = 0;
-                //          for (String term : input) {
-                //        v += lookup.lookup(term, onlyMorePopular, num).size();
-                //    }
-                //          return v;
-                //}
-                //      });
+                BenchmarkResult result = Measure(new PerformanceTestCallableIntHelper(this, input, lookup));
 
                 Console.WriteLine(
-            string.Format(CultureInfo.InvariantCulture, "{0:000000000000000}s queries: {1}, time[ms]: {2}, ~kQPS: {3:#.0}" /*"%-15s queries: %d, time[ms]: %s, ~kQPS: %.0f"*/,
+                    string.Format(CultureInfo.InvariantCulture, "{0,15}s queries: {1}, time[ms]: {2}, ~kQPS: {3:#.0}" /*"%-15s queries: %d, time[ms]: %s, ~kQPS: %.0f"*/,
                 lookup.GetType().Name,
                 input.size(),
                 result.average.toString(),
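
The anonymous Callable<Integer> blocks from the Java benchmark are replaced here with small named helper classes that Measure() can invoke. In C# the same idea can be expressed with a delegate; the following is only a hedged sketch of what such a timing helper might look like (the class name, rounds parameter, and timing details are illustrative, not the port's implementation):

    using System;
    using System.Diagnostics;

    static class Bench
    {
        // Runs the supplied work a few times and reports the average wall-clock time.
        internal static (double AverageMs, int LastResult) Measure(Func<int> work, int rounds = 5)
        {
            int result = 0;
            var sw = Stopwatch.StartNew();
            for (int i = 0; i < rounds; i++)
            {
                result = work();
            }
            sw.Stop();
            return (sw.Elapsed.TotalMilliseconds / rounds, result);
        }
    }

    // Usage (hypothetical): var r = Bench.Measure(() => BuildLookup(cls, dictionaryInput).GetHashCode());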

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d897e714/src/Lucene.Net.Tests.Suggest/Suggest/PersistenceTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/PersistenceTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/PersistenceTest.cs
index 6e84ff7..f6af6bf 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/PersistenceTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/PersistenceTest.cs
@@ -69,7 +69,6 @@ namespace Lucene.Net.Search.Suggest
         {
 
             // Add all input keys.
-            //Lookup lookup = lookupClass.newInstance();
             Lookup lookup = (Lookup)Activator.CreateInstance(lookupClass);
             Input[] keys = new Input[this.keys.Length];
             for (int i = 0; i < keys.Length; i++)
@@ -81,7 +80,6 @@ namespace Lucene.Net.Search.Suggest
             lookup.Store(new FileStream(Path.Combine(storeDir.FullName, "lookup.dat"), FileMode.OpenOrCreate));
 
             // Re-read it from disk.
-            //lookup = lookupClass.newInstance();
             lookup = (Lookup)Activator.CreateInstance(lookupClass);
             lookup.Load(new FileStream(Path.Combine(storeDir.FullName, "lookup.dat"), FileMode.Open));
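
Activator.CreateInstance is the .NET analogue of the Class.newInstance() call in the original Java test: given a Type with a parameterless constructor, it builds an instance via reflection. A tiny illustrative example, using an arbitrary type rather than a Lookup subclass:

    using System;
    using System.Collections.Generic;

    Type lookupClass = typeof(List<string>);                 // any type with a parameterless ctor
    var lookup = (List<string>)Activator.CreateInstance(lookupClass);
    lookup.Add("persisted");
    Console.WriteLine(lookup.Count);                         // prints 1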
 

