lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From synhers...@apache.org
Subject [17/50] [abbrv] lucenenet git commit: Ported QueryParser.Surround namespace + tests.
Date Sun, 11 Sep 2016 21:30:48 GMT
Ported QueryParser.Surround namespace + tests.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/1e7576a6
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/1e7576a6
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/1e7576a6

Branch: refs/heads/master
Commit: 1e7576a6cc2ccf8277004519ee8971283f4c89dc
Parents: 6224f3e
Author: Shad Storhaug <shad@shadstorhaug.com>
Authored: Tue Aug 2 14:48:31 2016 +0700
Committer: Shad Storhaug <shad@shadstorhaug.com>
Committed: Fri Sep 2 22:30:39 2016 +0700

----------------------------------------------------------------------
 .../Lucene.Net.QueryParser.csproj               |  27 +
 .../Surround/Parser/CharStream.cs               | 134 +++
 .../Surround/Parser/FastCharStream.cs           | 158 ++++
 .../Surround/Parser/ParseException.cs           | 234 +++++
 .../Surround/Parser/QueryParser.cs              | 912 +++++++++++++++++++
 .../Surround/Parser/QueryParserConstants.cs     | 120 +++
 .../Surround/Parser/QueryParserTokenManager.cs  | 760 ++++++++++++++++
 Lucene.Net.QueryParser/Surround/Parser/Token.cs | 142 +++
 .../Surround/Parser/TokenMgrError.cs            | 170 ++++
 .../Surround/Query/AndQuery.cs                  |  39 +
 .../Surround/Query/BasicQueryFactory.cs         | 110 +++
 .../Surround/Query/ComposedQuery.cs             | 144 +++
 .../Surround/Query/DistanceQuery.cs             | 117 +++
 .../Surround/Query/DistanceRewriteQuery.cs      |  35 +
 .../Surround/Query/DistanceSubQuery.cs          |  36 +
 .../Surround/Query/FieldsQuery.cs               | 105 +++
 .../Surround/Query/NotQuery.cs                  |  48 +
 .../Surround/Query/OrQuery.cs                   |  71 ++
 .../Surround/Query/RewriteQuery.cs              |  85 ++
 .../Surround/Query/SimpleTerm.cs                | 118 +++
 .../Surround/Query/SimpleTermRewriteQuery.cs    |  64 ++
 .../Surround/Query/SpanNearClauseFactory.cs     |  93 ++
 .../Surround/Query/SrndBooleanQuery.cs          |  51 ++
 .../Surround/Query/SrndPrefixQuery.cs           | 108 +++
 .../Surround/Query/SrndQuery.cs                 | 149 +++
 .../Surround/Query/SrndTermQuery.cs             |  63 ++
 .../Surround/Query/SrndTruncQuery.cs            | 139 +++
 .../Surround/Query/TooManyBasicQueries.cs       |  30 +
 .../Lucene.Net.Tests.QueryParser.csproj         |   7 +
 .../Surround/Query/BooleanQueryTst.cs           | 142 +++
 .../Surround/Query/ExceptionQueryTst.cs         |  76 ++
 .../Surround/Query/SingleFieldTestDb.cs         |  55 ++
 .../Surround/Query/SrndQueryTest.cs             |  48 +
 .../Surround/Query/Test01Exceptions.cs          |  72 ++
 .../Surround/Query/Test02Boolean.cs             | 178 ++++
 .../Surround/Query/Test03Distance.cs            | 341 +++++++
 36 files changed, 5181 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
index 646e931..dc38a02 100644
--- a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
+++ b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
@@ -58,6 +58,33 @@
     <Compile Include="Flexible\Standard\CommonQueryParserConfiguration.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
     <Compile Include="Simple\SimpleQueryParser.cs" />
+    <Compile Include="Surround\Parser\CharStream.cs" />
+    <Compile Include="Surround\Parser\FastCharStream.cs" />
+    <Compile Include="Surround\Parser\ParseException.cs" />
+    <Compile Include="Surround\Parser\QueryParser.cs" />
+    <Compile Include="Surround\Parser\QueryParserConstants.cs" />
+    <Compile Include="Surround\Parser\QueryParserTokenManager.cs" />
+    <Compile Include="Surround\Parser\Token.cs" />
+    <Compile Include="Surround\Parser\TokenMgrError.cs" />
+    <Compile Include="Surround\Query\AndQuery.cs" />
+    <Compile Include="Surround\Query\BasicQueryFactory.cs" />
+    <Compile Include="Surround\Query\ComposedQuery.cs" />
+    <Compile Include="Surround\Query\DistanceQuery.cs" />
+    <Compile Include="Surround\Query\DistanceRewriteQuery.cs" />
+    <Compile Include="Surround\Query\DistanceSubQuery.cs" />
+    <Compile Include="Surround\Query\FieldsQuery.cs" />
+    <Compile Include="Surround\Query\NotQuery.cs" />
+    <Compile Include="Surround\Query\OrQuery.cs" />
+    <Compile Include="Surround\Query\RewriteQuery.cs" />
+    <Compile Include="Surround\Query\SimpleTerm.cs" />
+    <Compile Include="Surround\Query\SimpleTermRewriteQuery.cs" />
+    <Compile Include="Surround\Query\SpanNearClauseFactory.cs" />
+    <Compile Include="Surround\Query\SrndBooleanQuery.cs" />
+    <Compile Include="Surround\Query\SrndPrefixQuery.cs" />
+    <Compile Include="Surround\Query\SrndQuery.cs" />
+    <Compile Include="Surround\Query\SrndTermQuery.cs" />
+    <Compile Include="Surround\Query\SrndTruncQuery.cs" />
+    <Compile Include="Surround\Query\TooManyBasicQueries.cs" />
   </ItemGroup>
   <ItemGroup>
     <ProjectReference Include="..\src\Lucene.Net.Analysis.Common\Lucene.Net.Analysis.Common.csproj">

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs b/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
new file mode 100644
index 0000000..bfb2fc2
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
@@ -0,0 +1,134 @@
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+
+    /// <summary> 
+    /// This interface describes a character stream that maintains line and
+    /// column number positions of the characters.  It also has the capability
+    /// to backup the stream to some extent.  An implementation of this
+    /// interface is used in the TokenManager implementation generated by
+    /// JavaCCParser.
+    /// 
+    /// All the methods except backup can be implemented in any fashion. backup
+    /// needs to be implemented correctly for the correct operation of the lexer.
+    /// Rest of the methods are all used to get information like line number,
+    /// column number and the String that constitutes a token and are not used
+    /// by the lexer. Hence their implementation won't affect the generated lexer's
+    /// operation.
+    /// </summary>
+    public interface ICharStream
+    {
+        /// <summary> 
+        /// Returns the next character from the selected input.  The method
+        /// of selecting the input is the responsibility of the class
+        /// implementing this interface.  Can throw any java.io.IOException.
+        /// </summary>
+        char ReadChar();
+
+        /// <summary>
+        /// Returns the column position of the character last read.
+        /// </summary>
+        /// <deprecated>
+        /// </deprecated>
+        /// <seealso cref="EndColumn">
+        /// </seealso>
+        [Obsolete]
+        int Column { get; }
+
+        /// <summary>
+        /// Returns the line number of the character last read.
+        /// </summary>
+        /// <deprecated>
+        /// </deprecated>
+        /// <seealso cref="EndLine">
+        /// </seealso>
+        [Obsolete]
+        int Line { get; }
+
+        /// <summary>
+        /// Returns the column number of the last character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int EndColumn { get; }
+
+        /// <summary> 
+        /// Returns the line number of the last character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int EndLine { get; }
+
+        /// <summary> 
+        /// Returns the column number of the first character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int BeginColumn { get; }
+
+        /// <summary> 
+        /// Returns the line number of the first character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int BeginLine { get; }
+
+        /// <summary> 
+        /// Backs up the input stream by amount steps. Lexer calls this method if it
+        /// had already read some characters, but could not use them to match a
+        /// (longer) token. So, they will be used again as the prefix of the next
+        /// token and it is the implementation's responsibility to do this right.
+        /// </summary>
+        void Backup(int amount);
+
+        /// <summary> 
+        /// Returns the next character that marks the beginning of the next token.
+        /// All characters must remain in the buffer between two successive calls
+        /// to this method to implement backup correctly.
+        /// </summary>
+        char BeginToken();
+
+        /// <summary> 
+        /// Returns a string made up of characters from the marked token beginning
+        /// to the current buffer position. Implementations have the choice of returning
+        /// anything that they want to. For example, for efficiency, one might decide
+        /// to just return null, which is a valid implementation.
+        /// </summary>
+        string Image { get; }
+
+        /// <summary> 
+        /// Returns an array of characters that make up the suffix of length 'len' for
+        /// the currently matched token. This is used to build up the matched string
+        /// for use in actions in the case of MORE. A simple and inefficient
+        /// implementation of this is as follows :
+        /// 
+        /// {
+        /// String t = GetImage();
+        /// return t.substring(t.length() - len, t.length()).toCharArray();
+        /// }
+        /// </summary>
+        char[] GetSuffix(int len);
+
+        /// <summary> 
+        /// The lexer calls this function to indicate that it is done with the stream
+        /// and hence implementations can free any resources held by this class.
+        /// Again, the body of this function can be just empty and it will not
+        /// affect the lexer's operation.
+        /// </summary>
+        void Done();
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs b/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
new file mode 100644
index 0000000..b33bd83
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
@@ -0,0 +1,158 @@
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// An efficient implementation of JavaCC's CharStream interface.  <p/>Note that
+    /// this does not do line-number counting, but instead keeps track of the
+    /// character position of the token in the input, as required by Lucene's <see cref="Lucene.Net.Analysis.Token" />
+    /// API.
+    /// </summary>
+    public sealed class FastCharStream : ICharStream
+    {
+        internal char[] buffer = null;
+
+        internal int bufferLength = 0; // end of valid chars
+        internal int bufferPosition = 0; // next char to read
+
+        internal int tokenStart = 0; // offset in buffer
+        internal int bufferStart = 0; // position in file of buffer
+
+        internal System.IO.TextReader input; // source of chars
+
+        /// <summary>
+        /// Constructs from a Reader. 
+        /// </summary>
+        public FastCharStream(System.IO.TextReader r)
+        {
+            input = r;
+        }
+
+        public char ReadChar()
+        {
+            if (bufferPosition >= bufferLength)
+                Refill();
+            return buffer[bufferPosition++];
+        }
+
+        private void Refill()
+        {
+            int newPosition = bufferLength - tokenStart;
+
+            if (tokenStart == 0)
+            {
+                // token won't fit in buffer
+                if (buffer == null)
+                {
+                    // first time: alloc buffer
+                    buffer = new char[2048];
+                }
+                else if (bufferLength == buffer.Length)
+                {
+                    // grow buffer
+                    char[] newBuffer = new char[buffer.Length * 2];
+                    Array.Copy(buffer, 0, newBuffer, 0, bufferLength);
+                    buffer = newBuffer;
+                }
+            }
+            else
+            {
+                // shift token to front
+                Array.Copy(buffer, tokenStart, buffer, 0, newPosition);
+            }
+
+            bufferLength = newPosition; // update state
+            bufferPosition = newPosition;
+            bufferStart += tokenStart;
+            tokenStart = 0;
+
+            int charsRead = input.Read(buffer, newPosition, buffer.Length - newPosition);
+            if (charsRead <= 0)
+                throw new System.IO.IOException("read past eof");
+            else
+                bufferLength += charsRead;
+        }
+
+        public char BeginToken()
+        {
+            tokenStart = bufferPosition;
+            return ReadChar();
+        }
+
+        public void Backup(int amount)
+        {
+            bufferPosition -= amount;
+        }
+
+        public string Image
+        {
+            get { return new System.String(buffer, tokenStart, bufferPosition - tokenStart); }
+        }
+
+        public char[] GetSuffix(int len)
+        {
+            char[] value_Renamed = new char[len];
+            Array.Copy(buffer, bufferPosition - len, value_Renamed, 0, len);
+            return value_Renamed;
+        }
+
+        public void Done()
+        {
+            try
+            {
+                input.Close();
+            }
+            catch (System.IO.IOException e)
+            {
+                System.Console.Error.WriteLine("Caught: " + e + "; ignoring.");
+            }
+        }
+
+        public int Column
+        {
+            get { return bufferStart + bufferPosition; }
+        }
+
+        public int Line
+        {
+            get { return 1; }
+        }
+
+        public int EndColumn
+        {
+            get { return bufferStart + bufferPosition; }
+        }
+
+        public int EndLine
+        {
+            get { return 1; }
+        }
+
+        public int BeginColumn
+        {
+            get { return bufferStart + tokenStart; }
+        }
+
+        public int BeginLine
+        {
+            get { return 1; }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs b/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
new file mode 100644
index 0000000..1716658
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
@@ -0,0 +1,234 @@
+using System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary> 
+    /// This exception is thrown when parse errors are encountered.
+    /// You can explicitly create objects of this exception type by
+    /// calling the method GenerateParseException in the generated
+    /// parser.
+    /// 
+    /// You can modify this class to customize your error reporting
+    /// mechanisms so long as you retain the public fields.
+    /// </summary>
+    [Serializable]
+    public class ParseException : Exception
+    {
+        /// <summary>
+        /// This constructor is used by the method "GenerateParseException"
+        /// in the generated parser.  Calling this constructor generates
+        /// a new object of this type with the fields "currentToken",
+        /// "expectedTokenSequences", and "tokenImage" set.
+        /// </summary>
+        /// <param name="currentTokenVal"></param>
+        /// <param name="expectedTokenSequencesVal"></param>
+        /// <param name="tokenImageVal"></param>
+        public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        string[] tokenImageVal)
+            : base(Initialize(currentTokenVal, expectedTokenSequencesVal, tokenImageVal))
+        {
+            currentToken = currentTokenVal;
+            expectedTokenSequences = expectedTokenSequencesVal;
+            tokenImage = tokenImageVal;
+        }
+
+        /**
+         * The following constructors are for use by you for whatever
+         * purpose you can think of.  Constructing the exception in this
+         * manner makes the exception behave in the normal way - i.e., as
+         * documented in the class "Throwable".  The fields "errorToken",
+         * "expectedTokenSequences", and "tokenImage" do not contain
+         * relevant information.  The JavaCC generated code does not use
+         * these constructors.
+         */
+
+        public ParseException()
+        { }
+
+        public ParseException(string message)
+            : base(message)
+        { }
+
+        public ParseException(string message, Exception innerException)
+            : base(message, innerException)
+        { }
+
+
+        /// <summary> 
+        /// This is the last token that has been consumed successfully.  If
+        /// this object has been created due to a parse error, the token
+        /// following this token will (therefore) be the first error token.
+        /// </summary>
+        public Token currentToken;
+
+        /// <summary> 
+        /// Each entry in this array is an array of integers.  Each array
+        /// of integers represents a sequence of tokens (by their ordinal
+        /// values) that is expected at this point of the parse.
+        /// </summary>
+        public int[][] expectedTokenSequences;
+
+        /// <summary> 
+        /// This is a reference to the "tokenImage" array of the generated
+        /// parser within which the parse error occurred.  This array is
+        /// defined in the generated ...Constants interface.
+        /// </summary>
+        public string[] tokenImage;
+
+
+        /// <summary>
+        /// It uses "currentToken" and "expectedTokenSequences" to generate a parse
+        /// error message and returns it.  If this object has been created
+        /// due to a parse error, and you do not catch it (it gets thrown
+        /// from the parser) the correct error message
+        /// gets displayed.
+        /// </summary>
+        /// <param name="currentToken"></param>
+        /// <param name="expectedTokenSequences"></param>
+        /// <param name="tokenImage"></param>
+        /// <returns></returns>
+        private static string Initialize(Token currentToken,
+            int[][] expectedTokenSequences,
+            string[] tokenImage)
+        {
+
+            StringBuilder expected = new StringBuilder();
+            int maxSize = 0;
+            for (int i = 0; i < expectedTokenSequences.Length; i++)
+            {
+                if (maxSize < expectedTokenSequences[i].Length)
+                {
+                    maxSize = expectedTokenSequences[i].Length;
+                }
+                for (int j = 0; j < expectedTokenSequences[i].Length; j++)
+                {
+                    expected.Append(tokenImage[expectedTokenSequences[i][j]]).Append(' ');
+                }
+                if (expectedTokenSequences[i][expectedTokenSequences[i].Length - 1] != 0)
+                {
+                    expected.Append("...");
+                }
+                expected.Append(eol).Append("    ");
+            }
+            string retval = "Encountered \"";
+            Token tok = currentToken.next;
+            for (int i = 0; i < maxSize; i++)
+            {
+                if (i != 0)
+                    retval += " ";
+                if (tok.kind == 0)
+                {
+                    retval += tokenImage[0];
+                    break;
+                }
+                retval += (" " + tokenImage[tok.kind]);
+                retval += " \"";
+                retval += Add_escapes(tok.image);
+                retval += " \"";
+                tok = tok.next;
+            }
+            retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
+            retval += ("." + eol);
+            if (expectedTokenSequences.Length == 1)
+            {
+                retval += ("Was expecting:" + eol + "    ");
+            }
+            else
+            {
+                retval += ("Was expecting one of:" + eol + "    ");
+            }
+            retval += expected.ToString();
+            return retval;
+        }
+
+        /// <summary> 
+        /// The end of line string for this machine.
+        /// </summary>
+        protected static string eol = Environment.NewLine;
+
+        /// <summary> 
+        /// Used to convert raw characters to their escaped version
+        /// when these raw versions cannot be used as part of an ASCII
+        /// string literal.
+        /// </summary>
+        internal static string Add_escapes(string str)
+        {
+            StringBuilder retval = new StringBuilder();
+            char ch;
+            for (int i = 0; i < str.Length; i++)
+            {
+                switch (str[i])
+                {
+
+                    case (char)(0):
+                        continue;
+
+                    case '\b':
+                        retval.Append("\\b");
+                        continue;
+
+                    case '\t':
+                        retval.Append("\\t");
+                        continue;
+
+                    case '\n':
+                        retval.Append("\\n");
+                        continue;
+
+                    case '\f':
+                        retval.Append("\\f");
+                        continue;
+
+                    case '\r':
+                        retval.Append("\\r");
+                        continue;
+
+                    case '\"':
+                        retval.Append("\\\"");
+                        continue;
+
+                    case '\'':
+                        retval.Append("\\\'");
+                        continue;
+
+                    case '\\':
+                        retval.Append("\\\\");
+                        continue;
+
+                    default:
+                        if ((ch = str[i]) < 0x20 || ch > 0x7e)
+                        {
+                            System.String s = "0000" + System.Convert.ToString(ch, 16);
+                            retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
+                        }
+                        else
+                        {
+                            retval.Append(ch);
+                        }
+                        continue;
+
+                }
+            }
+            return retval.ToString();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
new file mode 100644
index 0000000..49ef7d4
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -0,0 +1,912 @@
+using Lucene.Net.QueryParser.Surround.Query;
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// This class is generated by JavaCC.  The only method that clients should need
+    /// to call is <see cref="Parse(string)"/>.
+    ///
+
+    /// <p>This parser generates queries that make use of position information
+    ///   (Span queries). It provides positional operators (<code>w</code> and
+    ///   <code>n</code>) that accept a numeric distance, as well as boolean
+    ///   operators (<code>and</code>, <code>or</code>, and <code>not</code>,
+    ///   wildcards (<code>*</code> and <code>?</code>), quoting (with
+    ///   <code>"</code>), and boosting (via <code>^</code>).</p>
+
+    ///  <p>The operators (W, N, AND, OR, NOT) can be expressed lower-cased or
+    ///   upper-cased, and the non-unary operators (everything but NOT) support
+    ///   both infix <code>(a AND b AND c)</code> and prefix <code>AND(a, b,
+    ///   c)</code> notation. </p>
+
+    ///  <p>The W and N operators express a positional relationship among their
+    ///  operands.  N is ordered, and W is unordered.  The distance is 1 by
+    ///  default, meaning the operands are adjacent, or may be provided as a
+    ///  prefix from 2-99.  So, for example, 3W(a, b) means that terms a and b
+    ///  must appear within three positions of each other, or in other words, up
+    ///  to two terms may appear between a and b.  </p>
+    /// </summary>
+    public class QueryParser
+    {
        // Minimum number of characters required before the trailing wildcard of a
        // prefix query (see AllowedSuffix) and minimum number of non-wildcard
        // characters in a truncated query (see AllowedTruncation).
        internal readonly int minimumPrefixLength = 3;
        internal readonly int minimumCharsInTrunc = 3;
        // Messages prefixed to ParseException text for rejected input.
        internal readonly string truncationErrorMessage = "Too unrestrictive truncation: ";
        internal readonly string boostErrorMessage = "Cannot handle boost value: ";

        /* CHECKME: These should be the same as for the tokenizer. How? */
        internal readonly char truncator = '*';     // multi-character wildcard
        internal readonly char anyChar = '?';       // single-character wildcard
        internal readonly char quote = '"';
        internal readonly char fieldOperator = ':';
        internal readonly char comma = ','; /* prefix list separator */
        internal readonly char carat = '^'; /* weight operator */
+
+        public static SrndQuery Parse(string query)
+        {
+            QueryParser parser = new QueryParser();
+            return parser.Parse2(query);
+        }
+
        /// <summary>
        /// Creates a parser over an (initially) empty character stream;
        /// <see cref="Parse2(string)"/> re-initializes it with the real input.
        /// </summary>
        public QueryParser()
            : this(new FastCharStream(new StringReader("")))
        {
        }

        /// <summary>
        /// Parses the given surround query string into a query tree.
        /// </summary>
        /// <param name="query">the query text to parse</param>
        /// <returns>the parsed <see cref="SrndQuery"/></returns>
        /// <exception cref="ParseException">on a syntax or tokenizer error</exception>
        public virtual SrndQuery Parse2(string query)
        {
            ReInit(new FastCharStream(new StringReader(query)));
            try
            {
                return TopSrndQuery();
            }
            catch (TokenMgrError tme)
            {
                // Tokenizer failures are surfaced to callers as parse errors.
                throw new ParseException(tme.Message);
            }
        }
+
        /// <summary>Wraps a subquery with the field names it applies to.</summary>
        protected virtual SrndQuery GetFieldsQuery(
            SrndQuery q, IEnumerable<string> fieldNames)
        {
            /* FIXME: check acceptable subquery: at least one subquery should not be
             * a fields query.
             */
            return new FieldsQuery(q, fieldNames, fieldOperator);
        }

        /// <summary>Factory for an OR query over the given subqueries.</summary>
        protected virtual SrndQuery GetOrQuery(IEnumerable<SrndQuery> queries, bool infix, Token orToken)
        {
            return new OrQuery(queries, infix, orToken.image);
        }

        /// <summary>Factory for an AND query over the given subqueries.</summary>
        protected virtual SrndQuery GetAndQuery(IEnumerable<SrndQuery> queries, bool infix, Token andToken)
        {
            return new AndQuery(queries, infix, andToken.image);
        }

        /// <summary>Factory for a NOT query over the given subqueries.</summary>
        protected virtual SrndQuery GetNotQuery(IEnumerable<SrndQuery> queries, Token notToken)
        {
            return new NotQuery(queries, notToken.image);
        }
+
+        protected static int GetOpDistance(string distanceOp)
+        {
+            /* W, 2W, 3W etc -> 1, 2 3, etc. Same for N, 2N ... */
+            return distanceOp.Length == 1
+              ? 1
+              : int.Parse(distanceOp.Substring(0, distanceOp.Length - 1));
+        }
+
        /// <summary>
        /// Throws a <see cref="ParseException"/> when one of the distance query's
        /// subqueries is not allowed as a distance operand.
        /// </summary>
        protected static void CheckDistanceSubQueries(DistanceQuery distq, string opName)
        {
            string m = distq.DistanceSubQueryNotAllowed();
            if (m != null)
            {
                throw new ParseException("Operator " + opName + ": " + m);
            }
        }

        /// <summary>
        /// Factory for a distance (W/N) query; the numeric prefix of the operator
        /// token determines the maximum distance between operands.
        /// </summary>
        protected virtual SrndQuery GetDistanceQuery(
            IEnumerable<SrndQuery> queries,
            bool infix,
            Token dToken,
            bool ordered)
        {
            DistanceQuery dq = new DistanceQuery(queries,
                                                infix,
                                                GetOpDistance(dToken.image),
                                                dToken.image,
                                                ordered);
            CheckDistanceSubQueries(dq, dToken.image);
            return dq;
        }
+
        /// <summary>Factory for a single-term query.</summary>
        protected virtual SrndQuery GetTermQuery(
              String term, bool quoted)
        {
            return new SrndTermQuery(term, quoted);
        }

        /// <summary>
        /// True when a suffix term ("prefix*") is restrictive enough: the part
        /// before the trailing truncator must have at least minimumPrefixLength
        /// characters.
        /// </summary>
        protected virtual bool AllowedSuffix(String suffixed)
        {
            return (suffixed.Length - 1) >= minimumPrefixLength;
        }

        /// <summary>Factory for a prefix (trailing-wildcard) query.</summary>
        protected virtual SrndQuery GetPrefixQuery(
            string prefix, bool quoted)
        {
            return new SrndPrefixQuery(prefix, quoted, truncator);
        }
+
+        protected virtual bool AllowedTruncation(string truncated)
+        {
+            /* At least 3 normal characters needed. */
+            int nrNormalChars = 0;
+            for (int i = 0; i < truncated.Length; i++)
+            {
+                char c = truncated[i];
+                if ((c != truncator) && (c != anyChar))
+                {
+                    nrNormalChars++;
+                }
+            }
+            return nrNormalChars >= minimumCharsInTrunc;
+        }
+
        /// <summary>Factory for a truncated (embedded-wildcard) term query.</summary>
        protected virtual SrndQuery GetTruncQuery(string truncated)
        {
            return new SrndTruncQuery(truncated, truncator, anyChar);
        }
+
        /// <summary>
        /// Grammar entry point: a fields query followed by EOF (token kind 0).
        /// Generated by JavaCC; the "{ if (true) return ...; }" plus trailing
        /// throw is generated-code boilerplate kept for fidelity.
        /// </summary>
        public SrndQuery TopSrndQuery()
        {
            SrndQuery q;
            q = FieldsQuery();
            Jj_consume_token(0);
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>Production: optional "field:" prefixes, then an OR query.</summary>
        public SrndQuery FieldsQuery()
        {
            SrndQuery q;
            IEnumerable<string> fieldNames;
            fieldNames = OptionalFields();
            q = OrQuery();
            { if (true) return (fieldNames == null) ? q : GetFieldsQuery(q, fieldNames); }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: zero or more "field:" prefixes. Returns null when none are
        /// present, which FieldsQuery uses to skip the FieldsQuery wrapper.
        /// </summary>
        public IEnumerable<string> OptionalFields()
        {
            Token fieldName;
            IList<string> fieldNames = null;

            while (true)
            {
                // 2-token lookahead: only TERM followed by COLON is a field prefix.
                if (Jj_2_1(2))
                {
                    ;
                }
                else
                {
                    goto label_1;
                }
                // to the colon
                fieldName = Jj_consume_token(RegexpToken.TERM);
                Jj_consume_token(RegexpToken.COLON);
                if (fieldNames == null)
                {
                    fieldNames = new List<string>();
                }
                fieldNames.Add(fieldName.image);
            }
        label_1:
            { if (true) return fieldNames; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: AndQuery (OR AndQuery)*. An infix OR chain is flattened
        /// into a single list; only the last OR token's image is kept.
        /// </summary>
        public SrndQuery OrQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = AndQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OR:
                        ;
                        break;
                    default:
                        jj_la1[0] = jj_gen;
                        goto label_2;
                }
                oprt = Jj_consume_token(RegexpToken.OR);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = AndQuery();
                queries.Add(q);
            }
        label_2:
            { if (true) return (queries == null) ? q : GetOrQuery(queries, true /* infix */, oprt); }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: NotQuery (AND NotQuery)*. An infix AND chain is flattened
        /// into a single list; only the last AND token's image is kept.
        /// </summary>
        public SrndQuery AndQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = NotQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.AND:
                        ;
                        break;
                    default:
                        jj_la1[1] = jj_gen;
                        goto label_3;
                }
                oprt = Jj_consume_token(RegexpToken.AND);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = NotQuery();
                queries.Add(q);
            }
        label_3:
            { if (true) return (queries == null) ? q : GetAndQuery(queries, true /* infix */, oprt); }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: NQuery (NOT NQuery)*. An infix NOT chain is flattened
        /// into a single list; only the last NOT token's image is kept.
        /// </summary>
        public SrndQuery NotQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = NQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.NOT:
                        ;
                        break;
                    default:
                        jj_la1[2] = jj_gen;
                        goto label_4;
                }
                oprt = Jj_consume_token(RegexpToken.NOT);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = NQuery();
                queries.Add(q);
            }
        label_4:
            { if (true) return (queries == null) ? q : GetNotQuery(queries, oprt); }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: WQuery (N WQuery)*. Each N operator pairs the result so
        /// far with the next operand (left associative, unordered distance).
        /// </summary>
        public SrndQuery NQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries;
            Token dt;
            q = WQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.N:
                        ;
                        break;
                    default:
                        jj_la1[3] = jj_gen;
                        goto label_5;
                }
                dt = Jj_consume_token(RegexpToken.N);
                queries = new List<SrndQuery>();
                queries.Add(q); /* left associative */

                q = WQuery();
                queries.Add(q);
                q = GetDistanceQuery(queries, true /* infix */, dt, false /* not ordered */);
            }
        label_5:
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: PrimaryQuery (W PrimaryQuery)*. Each W operator pairs the
        /// result so far with the next operand (left associative, ordered distance).
        /// </summary>
        public SrndQuery WQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries;
            Token wt;
            q = PrimaryQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.W:
                        ;
                        break;
                    default:
                        jj_la1[4] = jj_gen;
                        goto label_6;
                }
                wt = Jj_consume_token(RegexpToken.W);
                queries = new List<SrndQuery>();
                queries.Add(q); /* left associative */

                q = PrimaryQuery();
                queries.Add(q);
                q = GetDistanceQuery(queries, true /* infix */, wt, true /* ordered */);
            }
        label_6:
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: a parenthesized fields query, a prefix operator query, or
        /// a simple term — each followed by optional ^weight boosts.
        /// </summary>
        public SrndQuery PrimaryQuery()
        {
            /* bracketed weighted query or weighted term */
            SrndQuery q;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.LPAREN:
                    Jj_consume_token(RegexpToken.LPAREN);
                    q = FieldsQuery();
                    Jj_consume_token(RegexpToken.RPAREN);
                    break;
                case RegexpToken.OR:
                case RegexpToken.AND:
                case RegexpToken.W:
                case RegexpToken.N:
                    q = PrefixOperatorQuery();
                    break;
                case RegexpToken.TRUNCQUOTED:
                case RegexpToken.QUOTED:
                case RegexpToken.SUFFIXTERM:
                case RegexpToken.TRUNCTERM:
                case RegexpToken.TERM:
                    q = SimpleTerm();
                    break;
                default:
                    jj_la1[5] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            OptionalWeights(q);
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: prefix notation for the non-unary operators, e.g.
        /// OR(a, b), AND(a, b), 3N(a, b), 2W(a, b).
        /// </summary>
        public SrndQuery PrefixOperatorQuery()
        {
            Token oprt;
            IEnumerable<SrndQuery> queries;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.OR:
                    oprt = Jj_consume_token(RegexpToken.OR);
                    /* prefix OR */
                    queries = FieldsQueryList();
                    { if (true) return GetOrQuery(queries, false /* not infix */, oprt); }
                    break;
                case RegexpToken.AND:
                    oprt = Jj_consume_token(RegexpToken.AND);
                    /* prefix AND */
                    queries = FieldsQueryList();
                    { if (true) return GetAndQuery(queries, false /* not infix */, oprt); }
                    break;
                case RegexpToken.N:
                    oprt = Jj_consume_token(RegexpToken.N);
                    /* prefix N */
                    queries = FieldsQueryList();
                    { if (true) return GetDistanceQuery(queries, false /* not infix */, oprt, false /* not ordered */); }
                    break;
                case RegexpToken.W:
                    oprt = Jj_consume_token(RegexpToken.W);
                    /* prefix W */
                    queries = FieldsQueryList();
                    { if (true) return GetDistanceQuery(queries, false  /* not infix */, oprt, true /* ordered */); }
                    break;
                default:
                    jj_la1[6] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: "(" FieldsQuery ("," FieldsQuery)+ ")" — the argument list
        /// of a prefix operator; at least two comma-separated operands required.
        /// </summary>
        public IEnumerable<SrndQuery> FieldsQueryList()
        {
            SrndQuery q;
            IList<SrndQuery> queries = new List<SrndQuery>();
            Jj_consume_token(RegexpToken.LPAREN);
            q = FieldsQuery();
            queries.Add(q);

            while (true)
            {
                Jj_consume_token(RegexpToken.COMMA);
                q = FieldsQuery();
                queries.Add(q);
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.COMMA:
                        ;
                        break;
                    default:
                        jj_la1[7] = jj_gen;
                        goto label_7;
                }
            }
        label_7:
            Jj_consume_token(RegexpToken.RPAREN);
            { if (true) return queries; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: a single term — plain, quoted, suffix (prefix*), truncated
        /// (with * / ?), or truncated-quoted. The Substring calls below are
        /// (start, length) conversions of Java's substring(begin, end).
        /// </summary>
        public SrndQuery SimpleTerm()
        {
            Token term;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.TERM:
                    term = Jj_consume_token(RegexpToken.TERM);
                    { if (true) return GetTermQuery(term.image, false /* not quoted */); }
                    break;
                case RegexpToken.QUOTED:
                    term = Jj_consume_token(RegexpToken.QUOTED);
                    // TODO: Substring fix
                    // Strips the surrounding quote characters.
                    { if (true) return GetTermQuery(term.image.Substring(1, (term.image.Length - 1) - 1), true /* quoted */); }
                    break;
                case RegexpToken.SUFFIXTERM:
                    term = Jj_consume_token(RegexpToken.SUFFIXTERM);
                    /* ending in * */
                    if (!AllowedSuffix(term.image))
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    // TODO: Substring fix
                    // Drops the trailing truncator character.
                    { if (true) return GetPrefixQuery(term.image.Substring(0, term.image.Length - 1), false /* not quoted */); }
                    break;
                case RegexpToken.TRUNCTERM:
                    term = Jj_consume_token(RegexpToken.TRUNCTERM);
                    /* with at least one * or ? */
                    if (!AllowedTruncation(term.image))
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    { if (true) return GetTruncQuery(term.image); }
                    break;
                case RegexpToken.TRUNCQUOTED:
                    term = Jj_consume_token(RegexpToken.TRUNCQUOTED);
                    /* eg. "9b-b,m"* */
                    if ((term.image.Length - 3) < minimumPrefixLength)
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    // TODO: Substring fix
                    // Strips the quotes and the trailing truncator.
                    { if (true) return GetPrefixQuery(term.image.Substring(1, (term.image.Length - 2) - 1), true /* quoted */); }
                    break;
                default:
                    jj_la1[8] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            throw new Exception("Missing return statement in function");
        }
+
+        public void OptionalWeights(SrndQuery q)
+        {
+            Token weight = null;
+        
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.CARAT:
+                        ;
+                        break;
+                    default:
+                        jj_la1[9] = jj_gen;
+                        goto label_8;
+                }
+                Jj_consume_token(RegexpToken.CARAT);
+                weight = Jj_consume_token(RegexpToken.NUMBER);
+                float f;
+                try
+                {
+                    // TODO: Test parsing float in various cultures (.NET)
+                    f = float.Parse(weight.image);
+                }
+                catch (Exception floatExc)
+                {
+                    { if (true) throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")"); }
+                }
+                if (f <= 0.0)
+                {
+                    { if (true) throw new ParseException(boostErrorMessage + weight.image); }
+                }
+                q.Weight = (f * q.Weight); /* left associative, fwiw */
+            }
+        label_8: ;
+        }
+
        /// <summary>
        /// Lookahead for OptionalFields: true when the next tokens match
        /// TERM COLON within a budget of xla tokens.
        /// </summary>
        private bool Jj_2_1(int xla)
        {
            jj_la = xla; jj_lastpos = jj_scanpos = token;
            try { return !Jj_3_1(); }
            catch (LookaheadSuccess) { return true; }
            finally { Jj_save(0, xla); }
        }

        // Scans TERM then COLON; returns true when the match FAILS (JavaCC convention).
        private bool Jj_3_1()
        {
            if (Jj_scan_token(RegexpToken.TERM)) return true;
            if (Jj_scan_token(RegexpToken.COLON)) return true;
            return false;
        }
+
        /// <summary>Generated Token Manager.</summary>
        public QueryParserTokenManager token_source;
        /// <summary>Current token.</summary>
        public Token token;
        /// <summary>Next token.</summary>
        public Token jj_nt;
        private int jj_ntk;                    // kind of the next token, or -1 when not yet fetched
        private Token jj_scanpos, jj_lastpos;  // lookahead scan positions
        private int jj_la;                     // remaining lookahead budget
        private int jj_gen;                    // generation counter used for error reporting
        private readonly int[] jj_la1 = new int[10];  // generation stamp per choice point
        private static int[] jj_la1_0;         // expected-token bitmask per choice point
        static QueryParser()
        {
            Jj_la1_init_0();
        }

        // Initializes the expected-token bitmask table consumed by GenerateParseException.
        private static void Jj_la1_init_0()
        {
            jj_la1_0 = new int[] { 0x100, 0x200, 0x400, 0x1000, 0x800, 0x7c3b00, 0x1b00, 0x8000, 0x7c0000, 0x20000, };
        }
        private readonly JJCalls[] jj_2_rtns = new JJCalls[1];  // memoized lookahead calls
        private bool jj_rescan = false;        // true while re-running lookaheads for error reporting
        private int jj_gc = 0;                 // counter driving periodic lookahead-chain cleanup
+
        /// <summary>
        /// Constructor with user supplied CharStream.
        /// </summary>
        /// <param name="stream">the character stream to tokenize</param>
        public QueryParser(ICharStream stream)
        {
            token_source = new QueryParserTokenManager(stream);
            token = new Token();
            jj_ntk = -1;
            jj_gen = 0;
            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
        }

        /// <summary>
        /// Reinitialise.
        /// </summary>
        /// <param name="stream">the new character stream to tokenize</param>
        public virtual void ReInit(ICharStream stream)
        {
            token_source.ReInit(stream);
            token = new Token();
            jj_ntk = -1;
            jj_gen = 0;
            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
        }

        /// <summary>
        /// Constructor with generated Token Manager.
        /// </summary>
        /// <param name="tm">the token manager to read tokens from</param>
        public QueryParser(QueryParserTokenManager tm)
        {
            token_source = tm;
            token = new Token();
            jj_ntk = -1;
            jj_gen = 0;
            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
        }

        /// <summary>
        /// Reinitialise.
        /// </summary>
        /// <param name="tm">the new token manager to read tokens from</param>
        public virtual void ReInit(QueryParserTokenManager tm)
        {
            token_source = tm;
            token = new Token();
            jj_ntk = -1;
            jj_gen = 0;
            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
        }
+
        /// <summary>
        /// Consumes the next token, which must be of the given kind; otherwise
        /// restores the previous position and throws via GenerateParseException.
        /// </summary>
        private Token Jj_consume_token(int kind)
        {
            Token oldToken;
            if ((oldToken = token).next != null) token = token.next;
            else token = token.next = token_source.GetNextToken();
            jj_ntk = -1;
            if (token.kind == kind)
            {
                jj_gen++;
                // Periodically drop stale memoized lookahead starts so consumed
                // tokens can be garbage collected.
                if (++jj_gc > 100)
                {
                    jj_gc = 0;
                    for (int i = 0; i < jj_2_rtns.Length; i++)
                    {
                        JJCalls c = jj_2_rtns[i];
                        while (c != null)
                        {
                            if (c.gen < jj_gen) c.first = null;
                            c = c.next;
                        }
                    }
                }
                return token;
            }
            token = oldToken;
            jj_kind = kind;
            throw GenerateParseException();
        }
+
        // Control-flow exception used to signal a successful lookahead match
        // (JavaCC convention); a single shared instance is thrown.
        private sealed class LookaheadSuccess : Exception { }
        private readonly LookaheadSuccess jj_ls = new LookaheadSuccess();

        /// <summary>
        /// Scans one token during lookahead. Returns true when the token does
        /// NOT match; throws jj_ls when the lookahead budget is exhausted with a
        /// successful match.
        /// </summary>
        private bool Jj_scan_token(int kind)
        {
            if (jj_scanpos == jj_lastpos)
            {
                jj_la--;
                if (jj_scanpos.next == null)
                {
                    jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
                }
                else
                {
                    jj_lastpos = jj_scanpos = jj_scanpos.next;
                }
            }
            else
            {
                jj_scanpos = jj_scanpos.next;
            }
            if (jj_rescan)
            {
                // While rescanning for error reporting, record the scanned token
                // and its position relative to the current token.
                int i = 0; Token tok = token;
                while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
                if (tok != null) Jj_add_error_token(kind, i);
            }
            if (jj_scanpos.kind != kind) return true;
            if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
            return false;
        }
+
        /// <summary>
        /// Get the next Token.
        /// </summary>
        /// <returns>the next token, fetched from the token manager when needed</returns>
        public Token GetNextToken()
        {
            if (token.next != null) token = token.next;
            else token = token.next = token_source.GetNextToken();
            jj_ntk = -1;
            jj_gen++;
            return token;
        }

        /// <summary>
        /// Get the specific Token.
        /// </summary>
        /// <param name="index">how many tokens ahead of the current token to look</param>
        /// <returns>the token at the requested offset (0 is the current token)</returns>
        public Token GetToken(int index)
        {
            Token t = token;
            for (int i = 0; i < index; i++)
            {
                if (t.next != null) t = t.next;
                else t = t.next = token_source.GetNextToken();
            }
            return t;
        }

        // Returns (and caches in jj_ntk) the kind of the next token, fetching it
        // from the token manager when it has not been read yet.
        private int Jj_ntk()
        {
            if ((jj_nt = token.next) == null)
                return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
            else
                return (jj_ntk = jj_nt.kind);
        }
+
        // Expected-token sequences and bookkeeping collected for ParseException reporting.
        private IList<int[]> jj_expentries = new List<int[]>();
        private int[] jj_expentry;             // sequence currently being built
        private int jj_kind = -1;              // kind that failed in Jj_consume_token, or -1
        private int[] jj_lasttokens = new int[100];  // token kinds seen during rescan
        private int jj_endpos;                 // length of the sequence in jj_lasttokens
+
+        private void Jj_add_error_token(int kind, int pos)
+        {
+            if (pos >= 100) return;
+            if (pos == jj_endpos + 1)
+            {
+                jj_lasttokens[jj_endpos++] = kind;
+            }
+            else if (jj_endpos != 0)
+            {
+                jj_expentry = new int[jj_endpos];
+                for (int i = 0; i < jj_endpos; i++)
+                {
+                    jj_expentry[i] = jj_lasttokens[i];
+                }
+                foreach (var oldentry in jj_expentries)
+                {
+                    if (oldentry.Length == jj_expentry.Length)
+                    {
+                        for (int i = 0; i < jj_expentry.Length; i++)
+                        {
+                            if (oldentry[i] != jj_expentry[i])
+                            {
+                                continue;
+                            }
+                        }
+                        jj_expentries.Add(jj_expentry);
+                        break;
+                    }
+                }
+                if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
+            }
+        }
+
        /// <summary>
        /// Generate ParseException.
        /// </summary>
        /// <returns>an exception describing the expected token kinds at the failure point</returns>
        public virtual ParseException GenerateParseException()
        {
            jj_expentries.Clear();
            // One flag per token kind (24 kinds in this grammar).
            bool[] la1tokens = new bool[24];
            if (jj_kind >= 0)
            {
                la1tokens[jj_kind] = true;
                jj_kind = -1;
            }
            // Merge the expected-token bitmasks of every choice point reached in
            // the current generation.
            for (int i = 0; i < 10; i++)
            {
                if (jj_la1[i] == jj_gen)
                {
                    for (int j = 0; j < 32; j++)
                    {
                        if ((jj_la1_0[i] & (1 << j)) != 0)
                        {
                            la1tokens[j] = true;
                        }
                    }
                }
            }
            for (int i = 0; i < 24; i++)
            {
                if (la1tokens[i])
                {
                    jj_expentry = new int[1];
                    jj_expentry[0] = i;
                    jj_expentries.Add(jj_expentry);
                }
            }
            jj_endpos = 0;
            // Re-run recorded lookaheads so their expected tokens are included.
            Jj_rescan_token();
            Jj_add_error_token(0, 0);
            int[][] exptokseq = new int[jj_expentries.Count][];
            for (int i = 0; i < jj_expentries.Count; i++)
            {
                exptokseq[i] = jj_expentries[i];
            }
            return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
        }
+
        /// <summary>Enable tracing. (No-op: this parser was generated without debug tracing.)</summary>
        public void Enable_tracing()
        {
        }

        /// <summary>Disable tracing. (No-op: this parser was generated without debug tracing.)</summary>
        public void Disable_tracing()
        {
        }
+
+        private void Jj_rescan_token()
+        {
+            jj_rescan = true;
+            for (int i = 0; i < 1; i++)
+            {
+                try
+                {
+                    JJCalls p = jj_2_rtns[i];
+                    do
+                    {
+                        if (p.gen > jj_gen)
+                        {
+                            jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
+                            switch (i)
+                            {
+                                case 0: Jj_3_1(); break;
+                            }
+                        }
+                        p = p.next;
+                    } while (p != null);
+                }
+                catch (LookaheadSuccess ls) { }
+            }
+            jj_rescan = false;
+        }
+
        // Memoizes the span consumed by a lookahead attempt so Jj_rescan_token
        // can replay it for error reporting.
        private void Jj_save(int index, int xla)
        {
            JJCalls p = jj_2_rtns[index];
            while (p.gen > jj_gen)
            {
                if (p.next == null) { p = p.next = new JJCalls(); break; }
                p = p.next;
            }
            p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
        }

        // One memoized lookahead call: generation stamp, start token, and
        // lookahead depth; chained via 'next'.
        internal sealed class JJCalls
        {
            internal int gen;
            internal Token first;
            internal int arg;
            internal JJCalls next;
        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs b/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
new file mode 100644
index 0000000..262f76b
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
@@ -0,0 +1,120 @@
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Regular-expression token ids used by the generated tokenizer and
+    /// parser. In the Java original these constants lived on the
+    /// QueryParserConstants interface; they are split into a static class
+    /// here because .NET interfaces cannot declare constants.
+    /// Note the gap at 7: that id corresponds to the "&lt;token of kind 7&gt;"
+    /// entry in QueryParserConstants.TokenImage.
+    /// </summary>
+    public static class RegexpToken
+    {
+        /// <summary>End of File. </summary>
+        public const int EOF = 0;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _NUM_CHAR = 1;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _TERM_CHAR = 2;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _WHITESPACE = 3;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _STAR = 4;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _ONE_CHAR = 5;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _DISTOP_NUM = 6;
+        /// <summary>RegularExpression Id. </summary>
+        public const int OR = 8;
+        /// <summary>RegularExpression Id. </summary>
+        public const int AND = 9;
+        /// <summary>RegularExpression Id. </summary>
+        public const int NOT = 10;
+        /// <summary>RegularExpression Id. </summary>
+        public const int W = 11;
+        /// <summary>RegularExpression Id. </summary>
+        public const int N = 12;
+        /// <summary>RegularExpression Id. </summary>
+        public const int LPAREN = 13;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RPAREN = 14;
+        /// <summary>RegularExpression Id. </summary>
+        public const int COMMA = 15;
+        /// <summary>RegularExpression Id. </summary>
+        public const int COLON = 16;
+        /// <summary>RegularExpression Id. </summary>
+        public const int CARAT = 17;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TRUNCQUOTED = 18;
+        /// <summary>RegularExpression Id. </summary>
+        public const int QUOTED = 19;
+        /// <summary>RegularExpression Id. </summary>
+        public const int SUFFIXTERM = 20;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TRUNCTERM = 21;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TERM = 22;
+        /// <summary>RegularExpression Id. </summary>
+        public const int NUMBER = 23;
+    }
+
+    /// <summary>
+    /// Lexical state ids for the generated token manager. Note the gap:
+    /// no state 1 is defined here — presumably an intermediate state used
+    /// only internally by the token manager (TODO confirm against
+    /// QueryParserTokenManager).
+    /// </summary>
+    public static class LexicalToken
+    {
+        /// <summary>Lexical state.</summary>
+        public const int Boost = 0;
+        /// <summary>Lexical state.</summary>
+        public const int DEFAULT = 2;
+    }
+
+    // NOTE: In Java, this was an interface. However, in 
+    // .NET we cannot define constants in an interface.
+    // So, instead we are making it a static class so it 
+    // can be shared between classes with different base classes.
+
+    // public interface QueryParserConstants
+
+    /// <summary> Token literal values and constants.
+    /// Generated by org.javacc.parser.OtherFilesGen#start()
+    /// </summary>
+    public static class QueryParserConstants
+    {
+        /// <summary>Literal token values, indexed by token kind
+        /// (the ids declared in <see cref="RegexpToken"/>). </summary>
+        // readonly prevents callers from reassigning the array reference
+        // (CA2105); the element values remain mutable, as in the original.
+        public static readonly string[] TokenImage = new string[] {
+            "<EOF>",
+            "<_NUM_CHAR>",
+            "<_TERM_CHAR>",
+            "<_WHITESPACE>",
+            "\"*\"",
+            "\"?\"",
+            "<_DISTOP_NUM>",
+            "<token of kind 7>",
+            "<OR>",
+            "<AND>",
+            "<NOT>",
+            "<W>",
+            "<N>",
+            "\"(\"",
+            "\")\"",
+            "\",\"",
+            "\":\"",
+            "\"^\"",
+            "<TRUNCQUOTED>",
+            "<QUOTED>",
+            "<SUFFIXTERM>",
+            "<TRUNCTERM>",
+            "<TERM>",
+            "<NUMBER>"
+        };
+    }
+}
\ No newline at end of file


Mime
View raw message