lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [01/58] lucenenet git commit: WIP on QueryParsers.Flexible
Date Tue, 06 Dec 2016 15:11:36 GMT
Repository: lucenenet
Updated Branches:
  refs/heads/master e8735ed96 -> 701b9ed20


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c83be6be/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
new file mode 100644
index 0000000..fdf12e7
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestQPHelper.cs
@@ -0,0 +1,1443 @@
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParsers.Flexible.Core;
+using Lucene.Net.QueryParsers.Flexible.Core.Messages;
+using Lucene.Net.QueryParsers.Flexible.Core.Nodes;
+using Lucene.Net.QueryParsers.Flexible.Core.Processors;
+using Lucene.Net.QueryParsers.Flexible.Messages;
+using Lucene.Net.QueryParsers.Flexible.Standard.Config;
+using Lucene.Net.QueryParsers.Flexible.Standard.Nodes;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using Lucene.Net.Util.Automaton;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.QueryParsers.Flexible.Standard
+{
+    /// <summary>
+    /// This test case is a copy of the core Lucene query parser test, it was adapted
+    /// to use new QueryParserHelper instead of the old query parser.
+    /// 
+    /// Tests QueryParser.
+    /// </summary>
+    // TODO: really this should extend QueryParserTestBase too!
+    public class TestQPHelper : LuceneTestCase
+    {
+        // Shared analyzer used by the tests that exercise stop-word removal and
+        // the "phrase" -> "phrase1 phrase2" expansion (see QPTestFilter / testQPA).
+        public static Analyzer qpAnalyzer;
+
+        // Fixture-level setup: create the shared analyzer once per fixture.
+        // NOTE(review): [TestFixtureSetUp]/[TestFixtureTearDown] are pre-NUnit-3
+        // attributes; NUnit 3+ uses [OneTimeSetUp]/[OneTimeTearDown] — confirm
+        // the NUnit version targeted by this test project.
+        [TestFixtureSetUp]
+        public static void BeforeClass()
+        {
+            qpAnalyzer = new QPTestAnalyzer();
+        }
+
+        // Fixture-level teardown: release the shared analyzer reference.
+        [TestFixtureTearDown]
+        public static void AfterClass()
+        {
+            qpAnalyzer = null;
+        }
+
+        // Token filter used by QPTestAnalyzer: discards the token "stop" and
+        // expands the token "phrase" into the pair "phrase1 phrase2", both
+        // emitted at the offsets of the original "phrase" token.
+        public sealed class QPTestFilter : TokenFilter
+        {
+            private readonly ICharTermAttribute termAtt;
+            private readonly IOffsetAttribute offsetAtt;
+
+            /**
+             * Filter which discards the token 'stop' and which expands the token
+             * 'phrase' into 'phrase1 phrase2'
+             */
+            public QPTestFilter(TokenStream @in)
+                        : base(@in)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                offsetAtt = AddAttribute<IOffsetAttribute>();
+            }
+
+            // Expansion state: when inPhrase is true, the next IncrementToken()
+            // call emits the queued "phrase2" token at the saved offsets.
+            private bool inPhrase = false;
+            private int savedStart = 0;
+            private int savedEnd = 0;
+
+
+            public override bool IncrementToken()
+            {
+                if (inPhrase)
+                {
+                    // Second half of the expansion queued by the branch below.
+                    inPhrase = false;
+                    ClearAttributes();
+                    termAtt.SetEmpty().Append("phrase2");
+                    offsetAtt.SetOffset(savedStart, savedEnd);
+                    return true;
+                }
+                else
+                    while (input.IncrementToken())
+                    {
+                        // NOTE(review): toString()/equals() are Java-style calls,
+                        // presumably extension methods from Lucene.Net.Support in
+                        // this WIP port — confirm they resolve.
+                        if (termAtt.toString().equals("phrase"))
+                        {
+                            // Emit "phrase1" now and queue "phrase2" for the next call.
+                            inPhrase = true;
+                            savedStart = offsetAtt.StartOffset();
+                            savedEnd = offsetAtt.EndOffset();
+                            termAtt.SetEmpty().Append("phrase1");
+                            offsetAtt.SetOffset(savedStart, savedEnd);
+                            return true;
+                        }
+                        else if (!termAtt.toString().equals("stop"))
+                            return true;
+                    }
+                // Upstream exhausted and nothing pending.
+                return false;
+            }
+
+
+            public override void Reset()
+            {
+                // Clear the expansion state so the filter can be reused.
+                base.Reset();
+                this.inPhrase = false;
+                this.savedStart = 0;
+                this.savedEnd = 0;
+            }
+        }
+
+        // Analyzer wiring a lowercasing SIMPLE MockTokenizer through QPTestFilter.
+        public sealed class QPTestAnalyzer : Analyzer
+        {
+
+            /** Filters MockTokenizer with StopFilter. */
+            // NOTE(review): the doc line above is inherited from the Java original;
+            // the filter actually applied below is QPTestFilter, not StopFilter.
+
+            public override sealed TokenStreamComponents CreateComponents(String fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(tokenizer, new QPTestFilter(tokenizer));
+            }
+        }
+
+        // StandardQueryParser variant whose processor pipeline gains an extra
+        // processor that rejects wildcard and fuzzy query nodes. Used to verify
+        // that the pipeline is extensible and that rejection surfaces as a
+        // QueryNodeException.
+        public class QPTestParser : StandardQueryParser
+        {
+            public QPTestParser(Analyzer a)
+            {
+                ((QueryNodeProcessorPipeline)GetQueryNodeProcessor())
+                    .Add(new QPTestParserQueryNodeProcessor());
+                this.Analyzer = (a);
+
+            }
+
+            private class QPTestParserQueryNodeProcessor :
+                QueryNodeProcessorImpl
+            {
+
+
+                // Post-processing is a no-op; nodes pass through unchanged.
+                protected override IQueryNode PostProcessNode(IQueryNode node)
+                {
+
+                    return node;
+
+                }
+
+
+                // Rejects wildcard/fuzzy nodes before any further processing.
+                protected override IQueryNode PreProcessNode(IQueryNode node)
+                {
+
+                    if (node is WildcardQueryNode || node is FuzzyQueryNode)
+                    {
+
+                        throw new QueryNodeException(new MessageImpl(
+                            QueryParserMessages.EMPTY_MESSAGE));
+
+                    }
+
+                    return node;
+
+                }
+
+
+                // Children order is left untouched.
+                protected override IList<IQueryNode> SetChildrenOrder(IList<IQueryNode> children)
+                {
+
+                    return children;
+
+                }
+
+            }
+
+        }
+
+        // Saved so tests that raise BooleanQuery.MaxClauseCount can restore it.
+        // NOTE(review): no TearDown restoring this value is visible in this
+        // chunk — presumably it appears later in the file; confirm.
+        private int originalMaxClauses;
+
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            originalMaxClauses = BooleanQuery.MaxClauseCount;
+        }
+
+        // Builds a StandardQueryParser over the given analyzer (or a lowercasing
+        // SIMPLE MockAnalyzer when a is null) with the default operator set to OR.
+        public StandardQueryParser GetParser(Analyzer a)
+        {
+            if (a == null)
+                a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            StandardQueryParser qp = new StandardQueryParser();
+            qp.Analyzer = (a);
+
+            qp.SetDefaultOperator(/*StandardQueryConfigHandler.*/Operator.OR);
+
+            return qp;
+
+        }
+
+        // Parses query against the default "field" using GetParser's configuration.
+        public Query GetQuery(String query, Analyzer a)
+        {
+            return (Query)GetParser(a).Parse(query, "field");
+        }
+
+        // Like GetQuery, but with leading wildcards (*term / ?term) permitted.
+        public Query GetQueryAllowLeadingWildcard(String query, Analyzer a)
+        {
+            StandardQueryParser parser = GetParser(a);
+            parser.AllowLeadingWildcard = (true);
+            return (Query)parser.Parse(query, "field");
+        }
+
+        // Parses query and fails unless its "field"-scoped ToString equals result.
+        public void assertQueryEquals(String query, Analyzer a, String result)
+        {
+            Query q = GetQuery(query, a);
+            String s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result
+                    + "/");
+            }
+        }
+
+        // Same contract as assertQueryEquals, but leading wildcards are allowed.
+        public void assertQueryEqualsAllowLeadingWildcard(String query, Analyzer a, String result)
+        {
+            Query q = GetQueryAllowLeadingWildcard(query, a);
+            String s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result
+                    + "/");
+            }
+        }
+
+        // Overload that uses a caller-supplied parser and default field (used by
+        // the date-resolution tests, which configure the parser beforehand).
+        public void assertQueryEquals(StandardQueryParser qp, String field,
+            String query, String result)
+        {
+            Query q = (Query)qp.Parse(query, field);
+            String s = q.ToString(field);
+            if (!s.equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result
+                    + "/");
+            }
+        }
+
+        // Checks QueryParserUtil.Escape output only; nothing is parsed here
+        // (the analyzer parameter is unused, kept for signature symmetry).
+        public void assertEscapedQueryEquals(String query, Analyzer a, String result)
+
+        {
+            String escapedQuery = QueryParserUtil.Escape(query);
+            if (!escapedQuery.equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + escapedQuery + "/, expecting /"
+                    + result + "/");
+            }
+        }
+
+        // Parses a wildcard query with explicit lowercase-expansion and
+        // leading-wildcard settings and compares the rendered result.
+        public void assertWildcardQueryEquals(String query, bool lowercase,
+            String result, bool allowLeadingWildcard)
+        {
+            StandardQueryParser qp = GetParser(null);
+            qp.LowercaseExpandedTerms = (lowercase);
+            qp.AllowLeadingWildcard = (allowLeadingWildcard);
+            Query q = (Query)qp.Parse(query, "field");
+            String s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
+                    + result + "/");
+            }
+        }
+
+        // Convenience overload: leading wildcards disallowed.
+        public void assertWildcardQueryEquals(String query, bool lowercase,
+            String result)
+        {
+            assertWildcardQueryEquals(query, lowercase, result, false);
+        }
+
+        // Overload that exercises the parser's default lowercase-expansion
+        // behavior (no explicit LowercaseExpandedTerms setting).
+        public void assertWildcardQueryEquals(String query, String result)
+        {
+            StandardQueryParser qp = GetParser(null);
+            Query q = (Query)qp.Parse(query, "field");
+            String s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
+                    + result + "/");
+            }
+        }
+
+        // "DOA" = Default Operator AND: parses with AND as the default operator
+        // instead of GetParser's OR.
+        public Query GetQueryDOA(String query, Analyzer a)
+        {
+            if (a == null)
+                a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            StandardQueryParser qp = new StandardQueryParser();
+            qp.Analyzer = (a);
+            qp.SetDefaultOperator(/*StandardQueryConfigHandler.*/Operator.AND);
+
+            return (Query)qp.Parse(query, "field");
+
+        }
+
+        // assertQueryEquals with AND as the default operator (see GetQueryDOA).
+        public void assertQueryEqualsDOA(String query, Analyzer a, String result)
+        {
+            Query q = GetQueryDOA(query, a);
+            String s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result
+                    + "/");
+            }
+        }
+        // Multi-term queries (wildcard, prefix, range) must default to the
+        // constant-score auto rewrite method.
+        [Test]
+        public void testConstantScoreAutoRewrite()
+        {
+            StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            Query q = (Query)qp.Parse("foo*bar", "field");
+            assertTrue(q is WildcardQuery);
+            assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery)q).GetRewriteMethod());
+
+            q = (Query)qp.Parse("foo*", "field");
+            assertTrue(q is PrefixQuery);
+            assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery)q).GetRewriteMethod());
+
+            q = (Query)qp.Parse("[a TO z]", "field");
+            assertTrue(q is TermRangeQuery);
+            assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery)q).GetRewriteMethod());
+        }
+        // The ideographic space (U+3000) between terms must be treated as
+        // whitespace, producing the same query as ASCII-space-separated terms.
+        [Test]
+        public void testCJK()
+        {
+            // Test Ideographic Space - As wide as a CJK character cell (fullwidth)
+            assertQueryEquals("term\u3000term\u3000term", null,
+                "term\u0020term\u0020term");
+            // used google to translate the word "term" to japanese -> 用語
+            // (the CJK characters were lost to "??" by an encoding round-trip in
+            // the ported file; restored from the upstream Java TestQPHelper)
+            assertQueryEqualsAllowLeadingWildcard("用語\u3000用語\u3000用語", null, "用語\u0020用語\u0020用語");
+        }
+
+        // Emits each character of the input as its own single-character token:
+        // individual CJK chars as terms, like StandardAnalyzer.
+        private sealed class SimpleCJKTokenizer : Tokenizer
+        {
+            private ICharTermAttribute termAtt;
+
+            public SimpleCJKTokenizer(TextReader input)
+                        : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+
+            public override bool IncrementToken()
+            {
+                // One token per character; a negative read means end of stream.
+                int ch = input.Read();
+                if (ch < 0)
+                    return false;
+                ClearAttributes();
+                termAtt.SetEmpty().Append((char)ch);
+                return true;
+            }
+        }
+
+        // Analyzer over SimpleCJKTokenizer: no filtering, one token per character.
+        private class SimpleCJKAnalyzer : Analyzer
+        {
+
+            public override TokenStreamComponents CreateComponents(String fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new SimpleCJKTokenizer(reader));
+            }
+        }
+        // A run of CJK characters must split into one SHOULD clause per char;
+        // with AND, each operand keeps that per-character expansion.
+        [Test]
+        public void testCJKTerm()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "中")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "国")), BooleanClause.Occur.SHOULD);
+            assertEquals(expected, GetQuery("中国", analyzer));
+
+            expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "中")), BooleanClause.Occur.MUST);
+            BooleanQuery inner = new BooleanQuery();
+            inner.Add(new TermQuery(new Term("field", "中")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Term("field", "国")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.MUST);
+            assertEquals(expected, GetQuery("中 AND 中国", new SimpleCJKAnalyzer()));
+
+        }
+        // A boost on a CJK run must apply to the whole expanded BooleanQuery.
+        [Test]
+        public void testCJKBoostedTerm()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            BooleanQuery expected = new BooleanQuery();
+            expected.Boost = (0.5f);
+            expected.Add(new TermQuery(new Term("field", "中")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "国")), BooleanClause.Occur.SHOULD);
+
+
+            assertEquals(expected, GetQuery("中国^0.5", analyzer));
+        }
+        // Quoting a CJK run must yield a PhraseQuery over the per-char terms.
+        [Test]
+        public void testCJKPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Add(new Term("field", "中"));
+            expected.Add(new Term("field", "国"));
+
+
+            assertEquals(expected, GetQuery("\"中国\"", analyzer));
+        }
+        // A boost on a quoted CJK phrase must apply to the PhraseQuery.
+        [Test]
+        public void testCJKBoostedPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Boost = (0.5f);
+            expected.Add(new Term("field", "中"));
+            expected.Add(new Term("field", "国"));
+
+
+            assertEquals(expected, GetQuery("\"中国\"^0.5", analyzer));
+        }
+        // A slop suffix (~3) on a quoted CJK phrase must set PhraseQuery.Slop.
+        [Test]
+        public void testCJKSloppyPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Slop = (3);
+            expected.Add(new Term("field", "中"));
+            expected.Add(new Term("field", "国"));
+
+
+            assertEquals(expected, GetQuery("\"中国\"~3", analyzer));
+        }
+        // Broad smoke test of the flexible parser's rendered output: terms,
+        // phrases, boolean operators and their symbolic forms, boosts, grouping,
+        // and the produced Query types.
+        // Note: "t�rm"/"�mlaut" in the WIP port were U+FFFD replacement chars
+        // from an encoding round-trip; restored to "türm"/"ümlaut" per the
+        // upstream Java TestQPHelper.
+        [Test]
+        public void testSimple()
+        {
+            assertQueryEquals("field=a", null, "a");
+            assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
+            assertQueryEquals("term term term", null, "term term term");
+            assertQueryEquals("türm term term", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false),
+                "türm term term");
+            assertQueryEquals("ümlaut", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false), "ümlaut");
+
+            // FIXME: change MockAnalyzer to not extend CharTokenizer for this test
+            //assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
+            //assertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
+
+            assertQueryEquals("a AND b", null, "+a +b");
+            assertQueryEquals("(a AND b)", null, "+a +b");
+            assertQueryEquals("c OR (a AND b)", null, "c (+a +b)");
+
+            assertQueryEquals("a AND NOT b", null, "+a -b");
+
+            assertQueryEquals("a AND -b", null, "+a -b");
+
+            assertQueryEquals("a AND !b", null, "+a -b");
+
+            assertQueryEquals("a && b", null, "+a +b");
+
+            assertQueryEquals("a && ! b", null, "+a -b");
+
+            assertQueryEquals("a OR b", null, "a b");
+            assertQueryEquals("a || b", null, "a b");
+
+            assertQueryEquals("a OR !b", null, "a -b");
+
+            assertQueryEquals("a OR ! b", null, "a -b");
+
+            assertQueryEquals("a OR -b", null, "a -b");
+
+            assertQueryEquals("+term -term term", null, "+term -term term");
+            assertQueryEquals("foo:term AND field:anotherTerm", null,
+                "+foo:term +anotherterm");
+            assertQueryEquals("term AND \"phrase phrase\"", null,
+                "+term +\"phrase phrase\"");
+            assertQueryEquals("\"hello there\"", null, "\"hello there\"");
+            assertTrue(GetQuery("a AND b", null) is BooleanQuery);
+            assertTrue(GetQuery("hello", null) is TermQuery);
+            assertTrue(GetQuery("\"hello there\"", null) is PhraseQuery);
+
+            assertQueryEquals("germ term^2.0", null, "germ term^2.0");
+            assertQueryEquals("(term)^2.0", null, "term^2.0");
+            assertQueryEquals("(germ term)^2.0", null, "(germ term)^2.0");
+            assertQueryEquals("term^2.0", null, "term^2.0");
+            assertQueryEquals("term^2", null, "term^2.0");
+            assertQueryEquals("\"germ term\"^2.0", null, "\"germ term\"^2.0");
+            assertQueryEquals("\"term germ\"^2", null, "\"term germ\"^2.0");
+
+            assertQueryEquals("(foo OR bar) AND (baz OR boo)", null,
+                "+(foo bar) +(baz boo)");
+            assertQueryEquals("((a OR b) AND NOT c) OR d", null, "(+(a b) -c) d");
+            assertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null,
+                "+(apple \"steve jobs\") -(foo bar baz)");
+            assertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null,
+                "+(title:dog title:cat) -author:\"bob dole\"");
+
+        }
+        // With a whitespace analyzer, punctuation inside a term is preserved.
+        [Test]
+        public void testPunct()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            assertQueryEquals("a&b", a, "a&b");
+            assertQueryEquals("a&&b", a, "a&&b");
+            assertQueryEquals(".NET", a, ".NET");
+        }
+        // Grouping, negated groups, and boosted groups must render with the
+        // expected prefix operators and parentheses.
+        [Test]
+        public void testGroup()
+        {
+            assertQueryEquals("!(a AND b) OR c", null, "-(+a +b) c");
+            assertQueryEquals("!(a AND b) AND c", null, "-(+a +b) +c");
+            assertQueryEquals("((a AND b) AND c)", null, "+(+a +b) +c");
+            assertQueryEquals("(a AND b) AND c", null, "+(+a +b) +c");
+            assertQueryEquals("b !(a AND b)", null, "b -(+a +b)");
+            assertQueryEquals("(a AND b)^4 OR c", null, "((+a +b)^4.0) c");
+        }
+        // Phrase slop: kept for real phrases, dropped when the "phrase" collapses
+        // to a single term or to nothing.
+        [Test]
+        public void testSlop()
+        {
+
+            assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
+            assertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork");
+            assertQueryEquals("\"term\"~2", null, "term");
+            assertQueryEquals("\" \"~2 germ", null, "germ");
+            assertQueryEquals("\"term germ\"~2^2", null, "\"term germ\"~2^2.0");
+        }
+        // Digits are dropped by the default letters-only analyzer but kept by a
+        // whitespace analyzer.
+        [Test]
+        public void testNumber()
+        {
+            // The numbers go away because the default (letters-only SIMPLE) analyzer ignores them
+            assertQueryEquals("3", null, "");
+            assertQueryEquals("term 1.0 1 2", null, "term");
+            assertQueryEquals("term term1 term2", null, "term term term");
+
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            assertQueryEquals("3", a, "3");
+            assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
+            assertQueryEquals("term term1 term2", a, "term term1 term2");
+        }
+        // Exercises prefix/wildcard/fuzzy parsing and rendering, boost printing,
+        // lowercase-expansion configuration, and leading-wildcard rejection vs.
+        // allowance. Relies on assertQueryNodeException, defined elsewhere in
+        // this file.
+        [Test]
+        public void testWildcard()
+        {
+            assertQueryEquals("term*", null, "term*");
+            assertQueryEquals("term*^2", null, "term*^2.0");
+            assertQueryEquals("term~", null, "term~2");
+            assertQueryEquals("term~0.7", null, "term~1");
+
+            assertQueryEquals("term~^3", null, "term~2^3.0");
+
+            assertQueryEquals("term^3~", null, "term~2^3.0");
+            assertQueryEquals("term*germ", null, "term*germ");
+            assertQueryEquals("term*germ^3", null, "term*germ^3.0");
+
+            assertTrue(GetQuery("term*", null) is PrefixQuery);
+            assertTrue(GetQuery("term*^2", null) is PrefixQuery);
+            assertTrue(GetQuery("term~", null) is FuzzyQuery);
+            assertTrue(GetQuery("term~0.7", null) is FuzzyQuery);
+            FuzzyQuery fq = (FuzzyQuery)GetQuery("term~0.7", null);
+            assertEquals(1, fq.MaxEdits);
+            assertEquals(FuzzyQuery.DefaultPrefixLength, fq.PrefixLength);
+            fq = (FuzzyQuery)GetQuery("term~", null);
+            assertEquals(2, fq.MaxEdits);
+            assertEquals(FuzzyQuery.DefaultPrefixLength, fq.PrefixLength);
+
+            assertQueryNodeException("term~1.1"); // value > 1, throws exception
+
+            assertTrue(GetQuery("term*germ", null) is WildcardQuery);
+
+            /*
+             * Tests to see that wild card terms are (or are not) properly lower-cased
+             * with proper parser configuration
+             */
+            // First prefix queries:
+            // by default, convert to lowercase:
+            assertWildcardQueryEquals("Term*", true, "term*");
+            // explicitly set lowercase:
+            assertWildcardQueryEquals("term*", true, "term*");
+            assertWildcardQueryEquals("Term*", true, "term*");
+            assertWildcardQueryEquals("TERM*", true, "term*");
+            // explicitly disable lowercase conversion:
+            assertWildcardQueryEquals("term*", false, "term*");
+            assertWildcardQueryEquals("Term*", false, "Term*");
+            assertWildcardQueryEquals("TERM*", false, "TERM*");
+            // Then 'full' wildcard queries:
+            // by default, convert to lowercase:
+            assertWildcardQueryEquals("Te?m", "te?m");
+            // explicitly set lowercase:
+            assertWildcardQueryEquals("te?m", true, "te?m");
+            assertWildcardQueryEquals("Te?m", true, "te?m");
+            assertWildcardQueryEquals("TE?M", true, "te?m");
+            assertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
+            // explicitly disable lowercase conversion:
+            assertWildcardQueryEquals("te?m", false, "te?m");
+            assertWildcardQueryEquals("Te?m", false, "Te?m");
+            assertWildcardQueryEquals("TE?M", false, "TE?M");
+            assertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
+            // Fuzzy queries:
+            assertWildcardQueryEquals("Term~", "term~2");
+            assertWildcardQueryEquals("Term~", true, "term~2");
+            assertWildcardQueryEquals("Term~", false, "Term~2");
+            // Range queries:
+
+            // TODO: implement this on QueryParser
+            // Q0002E_INVALID_SYNTAX_CANNOT_PARSE: Syntax Error, cannot parse '[A TO
+            // C]': Lexical error at line 1, column 1. Encountered: "[" (91), after
+            // : ""
+            assertWildcardQueryEquals("[A TO C]", "[a TO c]");
+            assertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
+            assertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
+            // Test suffix queries: first disallow
+            try
+            {
+                assertWildcardQueryEquals("*Term", true, "*term");
+                fail();
+            }
+            catch (QueryNodeException pe)
+            {
+                // expected exception
+            }
+            try
+            {
+                assertWildcardQueryEquals("?Term", true, "?term");
+                fail();
+            }
+            catch (QueryNodeException pe)
+            {
+                // expected exception
+            }
+            // Test suffix queries: then allow
+            assertWildcardQueryEquals("*Term", true, "*term", true);
+            assertWildcardQueryEquals("?Term", true, "?term", true);
+        }
+        // With leading wildcards allowed, all three forms must parse to
+        // WildcardQuery (not Prefix/TermQuery).
+        [Test]
+        public void testLeadingWildcardType()
+        {
+            StandardQueryParser qp = GetParser(null);
+            qp.AllowLeadingWildcard = (true);
+            assertEquals(typeof(WildcardQuery), qp.Parse("t*erm*", "field").GetType());
+            assertEquals(typeof(WildcardQuery), qp.Parse("?term*", "field").GetType());
+            assertEquals(typeof(WildcardQuery), qp.Parse("*term*", "field").GetType());
+        }
+        // Uses the fixture's qpAnalyzer (QPTestFilter): "stop" tokens vanish in
+        // every syntactic position, and "phrase" expands to "(phrase1 phrase2)".
+        [Test]
+        public void testQPA()
+        {
+            assertQueryEquals("term term^3.0 term", qpAnalyzer, "term term^3.0 term");
+            assertQueryEquals("term stop^3.0 term", qpAnalyzer, "term term");
+
+            assertQueryEquals("term term term", qpAnalyzer, "term term term");
+            assertQueryEquals("term +stop term", qpAnalyzer, "term term");
+            assertQueryEquals("term -stop term", qpAnalyzer, "term term");
+
+            assertQueryEquals("drop AND (stop) AND roll", qpAnalyzer, "+drop +roll");
+            assertQueryEquals("term +(stop) term", qpAnalyzer, "term term");
+            assertQueryEquals("term -(stop) term", qpAnalyzer, "term term");
+
+            assertQueryEquals("drop AND stop AND roll", qpAnalyzer, "+drop +roll");
+            assertQueryEquals("term phrase term", qpAnalyzer,
+                "term (phrase1 phrase2) term");
+
+            assertQueryEquals("term AND NOT phrase term", qpAnalyzer,
+                "+term -(phrase1 phrase2) term");
+
+            assertQueryEquals("stop^3", qpAnalyzer, "");
+            assertQueryEquals("stop", qpAnalyzer, "");
+            assertQueryEquals("(stop)^3", qpAnalyzer, "");
+            assertQueryEquals("((stop))^3", qpAnalyzer, "");
+            assertQueryEquals("(stop^3)", qpAnalyzer, "");
+            assertQueryEquals("((stop)^3)", qpAnalyzer, "");
+            assertQueryEquals("(stop)", qpAnalyzer, "");
+            assertQueryEquals("((stop))", qpAnalyzer, "");
+            assertTrue(GetQuery("term term term", qpAnalyzer) is BooleanQuery);
+            assertTrue(GetQuery("term +stop", qpAnalyzer) is TermQuery);
+        }
+        // Term ranges: inclusive/exclusive bounds, open (*) endpoints, the
+        // relational shorthand (>=, >, <=, <), boosts, and the default vs.
+        // explicitly configured rewrite method.
+        [Test]
+        public void testRange()
+        {
+            assertQueryEquals("[ a TO z]", null, "[a TO z]");
+            assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)GetQuery("[ a TO z]", null)).GetRewriteMethod());
+
+            StandardQueryParser qp = new StandardQueryParser();
+
+            qp.MultiTermRewriteMethod = (MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery)qp.Parse("[ a TO z]", "field")).GetRewriteMethod());
+
+            // test open ranges
+            assertQueryEquals("[ a TO * ]", null, "[a TO *]");
+            assertQueryEquals("[ * TO z ]", null, "[* TO z]");
+            assertQueryEquals("[ * TO * ]", null, "[* TO *]");
+
+
+            assertQueryEquals("field>=a", null, "[a TO *]");
+            assertQueryEquals("field>a", null, "{a TO *]");
+            assertQueryEquals("field<=a", null, "[* TO a]");
+            assertQueryEquals("field<a", null, "[* TO a}");
+
+            // mixing exclude and include bounds
+            assertQueryEquals("{ a TO z ]", null, "{a TO z]");
+            assertQueryEquals("[ a TO z }", null, "[a TO z}");
+            assertQueryEquals("{ a TO * ]", null, "{a TO *]");
+            assertQueryEquals("[ * TO z }", null, "[* TO z}");
+
+
+            assertQueryEquals("[ a TO z ]", null, "[a TO z]");
+            assertQueryEquals("{ a TO z}", null, "{a TO z}");
+            assertQueryEquals("{ a TO z }", null, "{a TO z}");
+            assertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
+            assertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
+            assertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
+            assertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
+            assertQueryEquals("gack ( bar blar { a TO z}) ", null,
+                "gack (bar blar {a TO z})");
+        }
+
+        /** for testing DateTools support: parse s and render it at the given
+         *  resolution.
+         *  NOTE(review): DateTime.Parse uses the current culture; the Java
+         *  original used the default-locale short DateFormat (see the commented
+         *  code and TODO below) — confirm the culture handling is equivalent. */
+        private String getDate(String s, DateTools.Resolution resolution)
+
+        {
+            // we use the default Locale since LuceneTestCase randomizes it
+            //DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.getDefault());
+            //return getDate(df.parse(s), resolution);
+
+            return getDate(DateTime.Parse(s), resolution); // TODO: Locale...
+        }
+
+        /** for testing DateTools support: format d via DateTools at the given resolution. */
+        private String getDate(DateTime d, DateTools.Resolution resolution)
+        {
+            return DateTools.DateToString(d, resolution);
+        }
+
+        // Quotes a date string containing a space so the query parser treats it
+        // as a single range endpoint.
+        private String escapeDateString(String s)
+        {
+            if (s.Contains(" "))
+            {
+                return "\"" + s + "\"";
+            }
+            else
+            {
+                return s;
+            }
+        }
+
+        // Renders the given day (at 23:59:59.999) using the current culture's
+        // short date pattern.
+        // NOTE(review): month here is 1-based (DateTime), while the Java
+        // original's Calendar.set took 0-based months — verify callers pass the
+        // intended month (see testDateRange).
+        private String getLocalizedDate(int year, int month, int day)
+        {
+            DateTime d = new DateTime(year, month, day, 23, 59, 59, 999);
+            return d.ToShortDateString();
+
+            //// we use the default Locale/TZ since LuceneTestCase randomizes it
+            //DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.getDefault());
+            //Calendar calendar = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
+            //calendar.clear();
+            //calendar.set(year, month, day);
+            //calendar.set(Calendar.HOUR_OF_DAY, 23);
+            //calendar.set(Calendar.MINUTE, 59);
+            //calendar.set(Calendar.SECOND, 59);
+            //calendar.set(Calendar.MILLISECOND, 999);
+            //return df.format(calendar.getTime());
+        }
+        // Date range queries must honor per-field date resolutions (month/hour)
+        // and fall back to the default resolution (millisecond) otherwise.
+        // NOTE(review): the Java original passed 0-based Calendar months (1 ==
+        // February); this port passes 1 to DateTime (January). Start, end, and
+        // expected dates are shifted consistently, so the assertions still
+        // agree with each other — confirm this was intentional.
+        [Test]
+        public void testDateRange()
+        {
+            String startDate = getLocalizedDate(2002, 1, 1);
+            String endDate = getLocalizedDate(2002, 1, 4);
+
+            //// we use the default Locale/TZ since LuceneTestCase randomizes it
+            //Calendar endDateExpected = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
+            //endDateExpected.clear();
+            //endDateExpected.set(2002, 1, 4, 23, 59, 59);
+            //endDateExpected.set(Calendar.MILLISECOND, 999);
+
+            // we use the default Locale/TZ since LuceneTestCase randomizes it
+            DateTime endDateExpected = new DateTime(2002, 1, 4, 23, 59, 59, 999, new GregorianCalendar());
+
+            String defaultField = "default";
+            String monthField = "month";
+            String hourField = "hour";
+            StandardQueryParser qp = new StandardQueryParser();
+
+            IDictionary<string, DateTools.Resolution?> dateRes = new Dictionary<string, DateTools.Resolution?>();
+
+            // set a field specific date resolution    
+            dateRes.Put(monthField, DateTools.Resolution.MONTH);
+            qp.SetDateResolution(dateRes);
+
+            // set default date resolution to MILLISECOND
+            qp.SetDateResolution(DateTools.Resolution.MILLISECOND);
+
+            // set second field specific date resolution
+            dateRes.Put(hourField, DateTools.Resolution.HOUR);
+            qp.SetDateResolution(dateRes);
+
+            // for this field no field specific date resolution has been set,
+            // so verify if the default resolution is used
+            assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
+                endDateExpected/*.getTime()*/, DateTools.Resolution.MILLISECOND);
+
+            // verify if field specific date resolutions are used for these two
+            // fields
+            assertDateRangeQueryEquals(qp, monthField, startDate, endDate,
+                endDateExpected/*.getTime()*/, DateTools.Resolution.MONTH);
+
+            assertDateRangeQueryEquals(qp, hourField, startDate, endDate,
+                endDateExpected/*.getTime()*/, DateTools.Resolution.HOUR);
+        }
+
+        /// <summary>
+        /// Asserts both the inclusive ([..]) and exclusive ({..}) range forms
+        /// parse to ranges rendered at <paramref name="resolution"/>. Note the
+        /// inclusive form uses <paramref name="endDateInclusive"/> (end-of-day
+        /// DateTime) for the upper bound, while the exclusive form uses the raw
+        /// <paramref name="endDate"/> string — this mirrors the upstream test.
+        /// </summary>
+        public void assertDateRangeQueryEquals(StandardQueryParser qp,
+            String field, String startDate, String endDate, DateTime endDateInclusive,
+            DateTools.Resolution resolution)
+        {
+            assertQueryEquals(qp, field, field + ":[" + escapeDateString(startDate) + " TO " + escapeDateString(endDate)
+                + "]", "[" + getDate(startDate, resolution) + " TO "
+                + getDate(endDateInclusive, resolution) + "]");
+            assertQueryEquals(qp, field, field + ":{" + escapeDateString(startDate) + " TO " + escapeDateString(endDate)
+                + "}", "{" + getDate(startDate, resolution) + " TO "
+                + getDate(endDate, resolution) + "}");
+        }
+        /// <summary>
+        /// Exercises backslash escaping of query-syntax characters, escaped
+        /// ranges, \uXXXX unicode escapes, and malformed escape sequences
+        /// (which must raise a QueryNodeException).
+        /// </summary>
+        [Test]
+        public void testEscaped()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+
+            /*
+             * assertQueryEquals("\\[brackets", a, "\\[brackets");
+             * assertQueryEquals("\\[brackets", null, "brackets");
+             * assertQueryEquals("\\\\", a, "\\\\"); assertQueryEquals("\\+blah", a,
+             * "\\+blah"); assertQueryEquals("\\(blah", a, "\\(blah");
+             * 
+             * assertQueryEquals("\\-blah", a, "\\-blah"); assertQueryEquals("\\!blah",
+             * a, "\\!blah"); assertQueryEquals("\\{blah", a, "\\{blah");
+             * assertQueryEquals("\\}blah", a, "\\}blah"); assertQueryEquals("\\:blah",
+             * a, "\\:blah"); assertQueryEquals("\\^blah", a, "\\^blah");
+             * assertQueryEquals("\\[blah", a, "\\[blah"); assertQueryEquals("\\]blah",
+             * a, "\\]blah"); assertQueryEquals("\\\"blah", a, "\\\"blah");
+             * assertQueryEquals("\\(blah", a, "\\(blah"); assertQueryEquals("\\)blah",
+             * a, "\\)blah"); assertQueryEquals("\\~blah", a, "\\~blah");
+             * assertQueryEquals("\\*blah", a, "\\*blah"); assertQueryEquals("\\?blah",
+             * a, "\\?blah"); //assertQueryEquals("foo \\&\\& bar", a,
+             * "foo \\&\\& bar"); //assertQueryEquals("foo \\|| bar", a,
+             * "foo \\|| bar"); //assertQueryEquals("foo \\AND bar", a,
+             * "foo \\AND bar");
+             */
+
+            assertQueryEquals("\\*", a, "*");
+
+
+            assertQueryEquals("\\a", a, "a");
+
+            assertQueryEquals("a\\-b:c", a, "a-b:c");
+            assertQueryEquals("a\\+b:c", a, "a+b:c");
+            assertQueryEquals("a\\:b:c", a, "a:b:c");
+            assertQueryEquals("a\\\\b:c", a, "a\\b:c");
+
+            assertQueryEquals("a:b\\-c", a, "a:b-c");
+            assertQueryEquals("a:b\\+c", a, "a:b+c");
+            assertQueryEquals("a:b\\:c", a, "a:b:c");
+            assertQueryEquals("a:b\\\\c", a, "a:b\\c");
+
+            assertQueryEquals("a:b\\-c*", a, "a:b-c*");
+            assertQueryEquals("a:b\\+c*", a, "a:b+c*");
+            assertQueryEquals("a:b\\:c*", a, "a:b:c*");
+
+            assertQueryEquals("a:b\\\\c*", a, "a:b\\c*");
+
+            assertQueryEquals("a:b\\-?c", a, "a:b-?c");
+            assertQueryEquals("a:b\\+?c", a, "a:b+?c");
+            assertQueryEquals("a:b\\:?c", a, "a:b:?c");
+
+            assertQueryEquals("a:b\\\\?c", a, "a:b\\?c");
+
+            // trailing ~ with no number defaults to edit distance 2 in the output
+            assertQueryEquals("a:b\\-c~", a, "a:b-c~2");
+            assertQueryEquals("a:b\\+c~", a, "a:b+c~2");
+            assertQueryEquals("a:b\\:c~", a, "a:b:c~2");
+            assertQueryEquals("a:b\\\\c~", a, "a:b\\c~2");
+
+            // TODO: implement Range queries on QueryParser
+            assertQueryEquals("[ a\\- TO a\\+ ]", null, "[a- TO a+]");
+            assertQueryEquals("[ a\\: TO a\\~ ]", null, "[a: TO a~]");
+            assertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]");
+
+            assertQueryEquals(
+                "[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]",
+                a, "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]");
+
+            assertQueryEquals("a\\\\\\+b", a, "a\\+b");
+
+            assertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d");
+            assertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\"");
+            assertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\"");
+
+            assertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt");
+
+            assertQueryNodeException("XY\\"); // there must be a character after the
+                                              // escape char
+
+            // test unicode escaping
+            assertQueryEquals("a\\u0062c", a, "abc");
+            assertQueryEquals("XY\\u005a", a, "XYZ");
+            assertQueryEquals("XY\\u005A", a, "XYZ");
+            assertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\"");
+
+            assertQueryNodeException("XY\\u005G"); // test non-hex character in escaped
+                                                   // unicode sequence
+            assertQueryNodeException("XY\\u005"); // test incomplete escaped unicode
+                                                  // sequence
+
+            // Tests bug LUCENE-800
+            assertQueryEquals("(item:\\\\ item:ABCD\\\\)", a, "item:\\ item:ABCD\\");
+            assertQueryNodeException("(item:\\\\ item:ABCD\\\\))"); // unmatched closing
+                                                                    // paranthesis
+            assertQueryEquals("\\*", a, "*");
+            assertQueryEquals("\\\\", a, "\\"); // escaped backslash
+
+            assertQueryNodeException("\\"); // a backslash must always be escaped
+
+            // LUCENE-1189
+            assertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b");
+        }
+        /// <summary>
+        /// Round-trip test of the escape helper: each raw query string on the
+        /// left must escape to the backslash-escaped form on the right
+        /// (verified via assertEscapedQueryEquals, defined elsewhere in this
+        /// class).
+        /// </summary>
+        [Test]
+        public void testQueryStringEscaping()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+
+            assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
+            assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
+            assertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+            assertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c");
+
+            assertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c");
+            assertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c");
+            assertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+            assertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c");
+
+            assertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*");
+            assertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*");
+            assertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*");
+
+            assertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*");
+
+            assertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c");
+            assertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c");
+            assertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c");
+
+            assertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c");
+
+            assertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~");
+            assertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~");
+            assertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~");
+            assertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~");
+
+            assertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]");
+            assertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]");
+            assertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]");
+
+            // LUCENE-881
+            assertEscapedQueryEquals("|| abc ||", a, "\\|\\| abc \\|\\|");
+            assertEscapedQueryEquals("&& abc &&", a, "\\&\\& abc \\&\\&");
+        }
+
+        /// <summary>
+        /// Checks that an escaped '?' survives inside a wildcard term.
+        /// Ignored because, per the attribute message, the flexible query
+        /// parser intentionally does not escape wildcard terms.
+        /// </summary>
+        [Test]
+        [Ignore("flexible queryparser shouldn't escape wildcard terms")]
+        public void testEscapedWildcard()
+        {
+            StandardQueryParser qp = new StandardQueryParser();
+            qp.Analyzer = (new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+
+            WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
+            assertEquals(q, qp.Parse("foo\\?ba?r", "field"));
+        }
+        /// <summary>
+        /// Verifies that \n, \r, \r\n and \t between clauses are treated as
+        /// plain whitespace — every variant must parse to the same
+        /// "+weltbank +worlbank" query (default operator AND, via
+        /// assertQueryEqualsDOA).
+        /// </summary>
+        [Test]
+        public void testTabNewlineCarriageReturn()
+        {
+            assertQueryEqualsDOA("+weltbank +worlbank", null, "+weltbank +worlbank");
+
+            assertQueryEqualsDOA("+weltbank\n+worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \n+worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \n +worlbank", null, "+weltbank +worlbank");
+
+            assertQueryEqualsDOA("+weltbank\r+worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \r+worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \r +worlbank", null, "+weltbank +worlbank");
+
+            assertQueryEqualsDOA("+weltbank\r\n+worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \r\n+worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \r\n +worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \r \n +worlbank", null,
+                "+weltbank +worlbank");
+
+            assertQueryEqualsDOA("+weltbank\t+worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \t+worlbank", null, "+weltbank +worlbank");
+            assertQueryEqualsDOA("weltbank \t +worlbank", null, "+weltbank +worlbank");
+        }
+        /// <summary>
+        /// With default-operator-AND parsing (assertQueryEqualsDOA), unmarked
+        /// terms become required (+) and an explicit '-' is preserved.
+        /// (Method name "DAO" vs helper "DOA" matches the upstream Java test.)
+        /// </summary>
+        [Test]
+        public void testSimpleDAO()
+        {
+            assertQueryEqualsDOA("term term term", null, "+term +term +term");
+            assertQueryEqualsDOA("term +term term", null, "+term +term +term");
+            assertQueryEqualsDOA("term term +term", null, "+term +term +term");
+            assertQueryEqualsDOA("term +term +term", null, "+term +term +term");
+            assertQueryEqualsDOA("-term term term", null, "-term +term +term");
+        }
+        /// <summary>
+        /// Checks boost parsing (^N) on terms and phrases, including terms
+        /// removed by stop-word filtering: a fully-stopped query must still
+        /// parse (to an empty query) and keep the default boost of 1.0.
+        /// </summary>
+        [Test]
+        public void testBoost()
+        {
+            // analyzer whose only stop word is "on"
+            CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.MakeString("on"));
+            Analyzer oneStopAnalyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopSet);
+            StandardQueryParser qp = new StandardQueryParser();
+            qp.Analyzer = (oneStopAnalyzer);
+
+            Query q = (Query)qp.Parse("on^1.0", "field");
+            assertNotNull(q);
+            q = (Query)qp.Parse("\"hello\"^2.0", "field");
+            assertNotNull(q);
+            // wide 0.5 tolerance mirrors the upstream Java assertion
+            assertEquals(q.Boost, (float)2.0, (float)0.5);
+            q = (Query)qp.Parse("hello^2.0", "field");
+            assertNotNull(q);
+            assertEquals(q.Boost, (float)2.0, (float)0.5);
+            q = (Query)qp.Parse("\"on\"^1.0", "field");
+            assertNotNull(q);
+
+            StandardQueryParser qp2 = new StandardQueryParser();
+            qp2.Analyzer = (new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET));
+
+            q = (Query)qp2.Parse("the^3", "field");
+            // "the" is a stop word so the result is an empty query:
+            assertNotNull(q);
+            assertEquals("", q.toString());
+            assertEquals(1.0f, q.Boost, 0.01f);
+        }
+
+        /// <summary>
+        /// Asserts that parsing <paramref name="queryString"/> throws a
+        /// <see cref="QueryNodeException"/>; fails the test if parsing
+        /// succeeds or nothing is thrown.
+        /// </summary>
+        public void assertQueryNodeException(String queryString)
+        {
+            try
+            {
+                GetQuery(queryString, null);
+            }
+            catch (QueryNodeException) // expected; variable unused (avoids CS0168)
+            {
+                return;
+            }
+            // original message said "ParseException", but this method expects
+            // a QueryNodeException — corrected to avoid misleading failures
+            fail("QueryNodeException expected, not thrown");
+        }
+        /// <summary>
+        /// Syntactically invalid (or disallowed-by-default) query strings must
+        /// each raise a QueryNodeException.
+        /// </summary>
+        [Test]
+        public void testException()
+        {
+            assertQueryNodeException("*leadingWildcard"); // disallowed by default
+            assertQueryNodeException("\"some phrase");
+            assertQueryNodeException("(foo bar");
+            assertQueryNodeException("foo bar))");
+            assertQueryNodeException("field:term:with:colon some more terms");
+            assertQueryNodeException("(sub query)^5.0^2.0 plus more");
+            assertQueryNodeException("secret AND illegal) AND access:confidential");
+        }
+        /// <summary>
+        /// A QPTestParser (custom subclass defined elsewhere in this test
+        /// project) must reject wildcard queries with a QueryNodeException.
+        /// </summary>
+        [Test]
+        public void testCustomQueryParserWildcard()
+        {
+            try
+            {
+                new QPTestParser(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).Parse("a?t", "contents");
+                fail("Wildcard queries should not be allowed");
+            }
+            catch (QueryNodeException) // expected exception; no variable needed (avoids CS0168)
+            {
+            }
+        }
+        /// <summary>
+        /// A QPTestParser (custom subclass defined elsewhere in this test
+        /// project) must reject fuzzy queries with a QueryNodeException.
+        /// </summary>
+        [Test]
+        public void testCustomQueryParserFuzzy()
+        {
+            try
+            {
+                new QPTestParser(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).Parse("xunit~", "contents");
+                fail("Fuzzy queries should not be allowed");
+            }
+            catch (QueryNodeException) // expected exception; no variable needed (avoids CS0168)
+            {
+            }
+        }
+
+        /// <summary>
+        /// With MaxClauseCount lowered to 2, a three-clause boolean query must
+        /// fail to parse. The global MaxClauseCount is restored to its saved
+        /// value in <see cref="TearDown"/>.
+        /// </summary>
+        [Test]
+        public void testBooleanQuery()
+        {
+            BooleanQuery.MaxClauseCount = (2);
+            try
+            {
+                StandardQueryParser qp = new StandardQueryParser();
+                qp.Analyzer = (new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+
+                qp.Parse("one two three", "field");
+                fail("ParseException expected due to too many boolean clauses");
+            }
+            catch (QueryNodeException) // too many boolean clauses, so the exception is expected (avoids CS0168)
+            {
+            }
+        }
+
+        /// <summary>
+        /// This test differs from TestPrecedenceQueryParser: here
+        /// "A AND B OR C AND D" must parse identically to "+A +B +C +D".
+        /// </summary>
+        [Test]
+        public void testPrecedence()
+        {
+            StandardQueryParser qp = new StandardQueryParser();
+            qp.Analyzer = (new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+
+            Query query1 = (Query)qp.Parse("A AND B OR C AND D", "field");
+            Query query2 = (Query)qp.Parse("+A +B +C +D", "field");
+
+            assertEquals(query1, query2);
+        }
+
+        // [Test]
+        // Todo: Convert from DateField to DateUtil
+        //  public void testLocalDateFormat() throws IOException, QueryNodeException {
+        //    Directory ramDir = newDirectory();
+        //    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
+        //    addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
+        //    addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
+        //    iw.close();
+        //    IndexSearcher is = new IndexSearcher(ramDir, true);
+        //    assertHits(1, "[12/1/2005 TO 12/3/2005]", is);
+        //    assertHits(2, "[12/1/2005 TO 12/4/2005]", is);
+        //    assertHits(1, "[12/3/2005 TO 12/4/2005]", is);
+        //    assertHits(1, "{12/1/2005 TO 12/3/2005}", is);
+        //    assertHits(1, "{12/1/2005 TO 12/4/2005}", is);
+        //    assertHits(0, "{12/3/2005 TO 12/4/2005}", is);
+        //    is.close();
+        //    ramDir.close();
+        //  }
+        //
+        //  private void addDateDoc(String content, int year, int month, int day,
+        //                          int hour, int minute, int second, IndexWriter iw) throws IOException {
+        //    Document d = new Document();
+        //    d.add(newField("f", content, Field.Store.YES, Field.Index.ANALYZED));
+        //    Calendar cal = Calendar.getInstance(Locale.ENGLISH);
+        //    cal.set(year, month - 1, day, hour, minute, second);
+        //    d.add(newField("date", DateField.dateToString(cal.getTime()),
+        //        Field.Store.YES, Field.Index.NOT_ANALYZED));
+        //    iw.addDocument(d);
+        //  }
+
+        /// <summary>
+        /// Placeholder: the entire body is the commented-out Java original,
+        /// which relied on an anonymous QueryParser subclass overriding
+        /// getWildcardQuery/getPrefixQuery/getFieldQuery — not yet ported to
+        /// the flexible parser, so the test currently asserts nothing.
+        /// </summary>
+        [Test]
+        public void testStarParsing()
+        {
+            // final int[] type = new int[1];
+            // StandardQueryParser qp = new StandardQueryParser("field", new
+            // WhitespaceAnalyzer()) {
+            // protected Query getWildcardQuery(String field, String termStr) throws
+            // ParseException {
+            // // override error checking of superclass
+            // type[0]=1;
+            // return new TermQuery(new Term(field,termStr));
+            // }
+            // protected Query getPrefixQuery(String field, String termStr) throws
+            // ParseException {
+            // // override error checking of superclass
+            // type[0]=2;
+            // return new TermQuery(new Term(field,termStr));
+            // }
+            //
+            // protected Query getFieldQuery(String field, String queryText) throws
+            // ParseException {
+            // type[0]=3;
+            // return super.getFieldQuery(field, queryText);
+            // }
+            // };
+            //
+            // TermQuery tq;
+            //
+            // tq = (TermQuery)qp.parse("foo:zoo*");
+            // assertEquals("zoo",tq.getTerm().text());
+            // assertEquals(2,type[0]);
+            //
+            // tq = (TermQuery)qp.parse("foo:zoo*^2");
+            // assertEquals("zoo",tq.getTerm().text());
+            // assertEquals(2,type[0]);
+            // assertEquals(tq.getBoost(),2,0);
+            //
+            // tq = (TermQuery)qp.parse("foo:*");
+            // assertEquals("*",tq.getTerm().text());
+            // assertEquals(1,type[0]); // could be a valid prefix query in the
+            // future too
+            //
+            // tq = (TermQuery)qp.parse("foo:*^2");
+            // assertEquals("*",tq.getTerm().text());
+            // assertEquals(1,type[0]);
+            // assertEquals(tq.getBoost(),2,0);
+            //
+            // tq = (TermQuery)qp.parse("*:foo");
+            // assertEquals("*",tq.getTerm().field());
+            // assertEquals("foo",tq.getTerm().text());
+            // assertEquals(3,type[0]);
+            //
+            // tq = (TermQuery)qp.parse("*:*");
+            // assertEquals("*",tq.getTerm().field());
+            // assertEquals("*",tq.getTerm().text());
+            // assertEquals(1,type[0]); // could be handled as a prefix query in the
+            // future
+            //
+            // tq = (TermQuery)qp.parse("(*:*)");
+            // assertEquals("*",tq.getTerm().field());
+            // assertEquals("*",tq.getTerm().text());
+            // assertEquals(1,type[0]);
+
+        }
+        /// <summary>
+        /// Parses /regex/ syntax: lowercasing of expanded terms, boost on
+        /// regexps, rewrite-method propagation, escaped '/' and '*' inside the
+        /// pattern, regexps mixed into boolean/phrase queries, and the
+        /// distinction between "/boo/" (regexp) and "\"/boo/\"" (term).
+        /// NOTE: assertion order matters — parser state (LowercaseExpandedTerms,
+        /// MultiTermRewriteMethod) is mutated mid-test.
+        /// </summary>
+        [Test]
+        public void testRegexps()
+        {
+            StandardQueryParser qp = new StandardQueryParser();
+            String df = "field";
+            RegexpQuery q = new RegexpQuery(new Term("field", "[a-z][123]"));
+            assertEquals(q, qp.Parse("/[a-z][123]/", df));
+            qp.LowercaseExpandedTerms = (true);
+            assertEquals(q, qp.Parse("/[A-Z][123]/", df));
+            q.Boost = (0.5f);
+            assertEquals(q, qp.Parse("/[A-Z][123]/^0.5", df));
+            qp.MultiTermRewriteMethod = (MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            q.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE); // LUCENENET TODO: Inconsistent API betwen RegexpQuery and StandardQueryParser
+            assertTrue(qp.Parse("/[A-Z][123]/^0.5", df) is RegexpQuery);
+            assertEquals(q, qp.Parse("/[A-Z][123]/^0.5", df));
+            assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((RegexpQuery)qp.Parse("/[A-Z][123]/^0.5", df)).GetRewriteMethod());
+            qp.MultiTermRewriteMethod = (MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT);
+
+            // escaped '/' and '*' inside the regexp body stay part of the pattern
+            Query escaped = new RegexpQuery(new Term("field", "[a-z]\\/[123]"));
+            assertEquals(escaped, qp.Parse("/[a-z]\\/[123]/", df));
+            Query escaped2 = new RegexpQuery(new Term("field", "[a-z]\\*[123]"));
+            assertEquals(escaped2, qp.Parse("/[a-z]\\*[123]/", df));
+
+            BooleanQuery complex = new BooleanQuery();
+            complex.Add(new RegexpQuery(new Term("field", "[a-z]\\/[123]")), BooleanClause.Occur.MUST);
+            complex.Add(new TermQuery(new Term("path", "/etc/init.d/")), BooleanClause.Occur.MUST);
+            complex.Add(new TermQuery(new Term("field", "/etc/init[.]d/lucene/")), BooleanClause.Occur.SHOULD);
+            assertEquals(complex, qp.Parse("/[a-z]\\/[123]/ AND path:\"/etc/init.d/\" OR \"/etc\\/init\\[.\\]d/lucene/\" ", df));
+
+            Query re = new RegexpQuery(new Term("field", "http.*"));
+            assertEquals(re, qp.Parse("field:/http.*/", df));
+            assertEquals(re, qp.Parse("/http.*/", df));
+
+            re = new RegexpQuery(new Term("field", "http~0.5"));
+            assertEquals(re, qp.Parse("field:/http~0.5/", df));
+            assertEquals(re, qp.Parse("/http~0.5/", df));
+
+            re = new RegexpQuery(new Term("field", "boo"));
+            assertEquals(re, qp.Parse("field:/boo/", df));
+            assertEquals(re, qp.Parse("/boo/", df));
+
+            // quoted or escaped slashes produce a plain TermQuery, not a regexp
+            assertEquals(new TermQuery(new Term("field", "/boo/")), qp.Parse("\"/boo/\"", df));
+            assertEquals(new TermQuery(new Term("field", "/boo/")), qp.Parse("\\/boo\\/", df));
+
+            BooleanQuery two = new BooleanQuery();
+            two.Add(new RegexpQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
+            two.Add(new RegexpQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
+            assertEquals(two, qp.Parse("field:/foo/ field:/bar/", df));
+            assertEquals(two, qp.Parse("/foo/ /bar/", df));
+        }
+        /// <summary>
+        /// With "the" and "foo" as stop words: an all-stop-word disjunction
+        /// collapses to an empty BooleanQuery, a mixed disjunction collapses
+        /// to a single TermQuery, and a larger boolean keeps only its
+        /// surviving clauses.
+        /// </summary>
+        [Test]
+        public void testStopwords()
+        {
+            StandardQueryParser qp = new StandardQueryParser();
+            CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").ToAutomaton());
+            qp.Analyzer = (new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopSet));
+
+            Query result = (Query)qp.Parse("a:the OR a:foo", "a");
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a BooleanQuery", result is BooleanQuery);
+            assertTrue(((BooleanQuery)result).Clauses.size() + " does not equal: "
+                + 0, ((BooleanQuery)result).Clauses.size() == 0);
+            result = (Query)qp.Parse("a:woo OR a:the", "a");
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a TermQuery", result is TermQuery);
+            result = (Query)qp.Parse(
+                    "(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)",
+                    "a");
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a BooleanQuery", result is BooleanQuery);
+            if (VERBOSE)
+                Console.WriteLine("Result: " + result);
+            assertTrue(((BooleanQuery)result).Clauses.size() + " does not equal: "
+                + 2, ((BooleanQuery)result).Clauses.size() == 2);
+        }
+        /// <summary>
+        /// With position increments enabled, stop words removed from a phrase
+        /// must leave gaps: the surviving terms of the test phrase land at
+        /// positions 1, 3, 4, 6 and 9.
+        /// </summary>
+        [Test]
+        public void testPositionIncrement()
+        {
+            StandardQueryParser qp = new StandardQueryParser();
+            qp.Analyzer = (
+                    new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET));
+
+            qp.EnablePositionIncrements = (true);
+
+            String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
+            // 0 2 5 7 8
+            int[] expectedPositions = { 1, 3, 4, 6, 9 };
+            PhraseQuery pq = (PhraseQuery)qp.Parse(qtxt, "a");
+            // System.out.println("Query text: "+qtxt);
+            // System.out.println("Result: "+pq);
+            Term[] t = pq.Terms;
+            int[] pos = pq.Positions;
+            for (int i = 0; i < t.Length; i++)
+            {
+                // System.out.println(i+". "+t[i]+"  pos: "+pos[i]);
+                assertEquals("term " + i + " = " + t[i] + " has wrong term-position!",
+                    expectedPositions[i], pos[i]);
+            }
+        }
+        /// <summary>
+        /// "*:*" (bare or parenthesized) must parse to MatchAllDocsQuery, and
+        /// "+*:* -*:*" to a BooleanQuery whose clauses both wrap
+        /// MatchAllDocsQuery.
+        /// </summary>
+        [Test]
+        public void testMatchAllDocs()
+        {
+            StandardQueryParser qp = new StandardQueryParser();
+            qp.Analyzer = (new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+
+            assertEquals(new MatchAllDocsQuery(), qp.Parse("*:*", "field"));
+            assertEquals(new MatchAllDocsQuery(), qp.Parse("(*:*)", "field"));
+            BooleanQuery bq = (BooleanQuery)qp.Parse("+*:* -*:*", "field");
+            assertTrue(bq.GetClauses()[0].Query is MatchAllDocsQuery);
+            assertTrue(bq.GetClauses()[1].Query is MatchAllDocsQuery);
+        }
+
+        /// <summary>
+        /// Parses <paramref name="query"/> against the "date" field with an
+        /// English culture and asserts the searcher returns exactly
+        /// <paramref name="expected"/> hits. Currently only referenced by the
+        /// commented-out testLocalDateFormat above.
+        /// </summary>
+        private void assertHits(int expected, String query, IndexSearcher @is)
+        {
+            StandardQueryParser qp = new StandardQueryParser();
+            qp.Analyzer = (new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            qp.Locale = new CultureInfo("en");//  (Locale.ENGLISH); // LUCENENET TODO: Fix API - we probably don't want to set Culture to a property
+
+            Query q = (Query)qp.Parse(query, "date");
+            ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs;
+            assertEquals(expected, hits.Length);
+        }
+
+
+        /// <summary>
+        /// Restores the saved BooleanQuery.MaxClauseCount (mutated by
+        /// testBooleanQuery) before running base teardown.
+        /// </summary>
+        public override void TearDown()
+        {
+            BooleanQuery.MaxClauseCount = (originalMaxClauses);
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Emits a fixed four-token stream regardless of input: "a" (incr 1),
+        /// "b" (incr 1), "c" (incr 0), "d" (incr 0) — so "c"/"d" occupy the
+        /// same positions as "a"/"b". Used by testMultiPhraseQuery.
+        /// </summary>
+        private sealed class CannedTokenizer : Tokenizer
+        {
+            private int upto = 0;
+            private readonly IPositionIncrementAttribute posIncr;
+            private readonly ICharTermAttribute term;
+
+            public CannedTokenizer(TextReader reader)
+                        : base(reader)
+            {
+                posIncr = AddAttribute<IPositionIncrementAttribute>();
+                term = AddAttribute<ICharTermAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                ClearAttributes();
+                switch (upto)
+                {
+                    case 0:
+                        posIncr.PositionIncrement = 1;
+                        term.SetEmpty().Append("a");
+                        break;
+                    case 1:
+                        posIncr.PositionIncrement = 1;
+                        term.SetEmpty().Append("b");
+                        break;
+                    case 2:
+                        posIncr.PositionIncrement = 0;
+                        term.SetEmpty().Append("c");
+                        break;
+                    case 3:
+                        posIncr.PositionIncrement = 0;
+                        term.SetEmpty().Append("d");
+                        break;
+                    default: // stream exhausted after four tokens
+                        return false;
+                }
+                upto++;
+                return true;
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.upto = 0;
+            }
+        }
+
+        /// <summary>
+        /// Analyzer whose token stream is always a <see cref="CannedTokenizer"/>;
+        /// both the field name and the supplied reader's contents are ignored
+        /// by the tokenizer's canned output.
+        /// </summary>
+        private class CannedAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(String ignored, TextReader alsoIgnored)
+            {
+                CannedTokenizer source = new CannedTokenizer(alsoIgnored);
+                return new TokenStreamComponents(source);
+            }
+        }
+        /// <summary>
+        /// With the CannedAnalyzer (stacked tokens at shared positions), a
+        /// single-term phrase query must become a MultiPhraseQuery and match
+        /// the one indexed document.
+        /// </summary>
+        [Test]
+        public void testMultiPhraseQuery()
+        {
+            Store.Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new CannedAnalyzer()));
+            Document doc = new Document();
+            doc.Add(NewTextField("field", "", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = DirectoryReader.Open(w, true);
+            IndexSearcher s = NewSearcher(r);
+
+            Query q = (Query)new StandardQueryParser(new CannedAnalyzer()).Parse("\"a\"", "field");
+            assertTrue(q is MultiPhraseQuery);
+            assertEquals(1, s.Search(q, 10).TotalHits);
+            r.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+        /// <summary>
+        /// A bare regexp with multiple default fields expands to one SHOULD
+        /// clause per field, while an explicitly-fielded regexp parses to a
+        /// single RegexpQuery.
+        /// </summary>
+        [Test]
+        public void testRegexQueryParsing()
+        {
+            String[] fields = { "b", "t" };
+
+            StandardQueryParser parser = new StandardQueryParser();
+            parser.SetMultiFields(fields);
+            parser.SetDefaultOperator(Operator.AND);
+            parser.Analyzer = new MockAnalyzer(Random());
+
+            // TODO specification? occur was "MUST" in an earlier version
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new BooleanClause(new RegexpQuery(new Term("b", "ab.+")), BooleanClause.Occur.SHOULD));
+            expected.Add(new BooleanClause(new RegexpQuery(new Term("t", "ab.+")), BooleanClause.Occur.SHOULD));
+
+            assertEquals(expected, parser.Parse("/ab.+/", null));
+
+            RegexpQuery fieldedExpected = new RegexpQuery(new Term("test", "[abc]?[0-9]"));
+
+            assertEquals(fieldedExpected, parser.Parse("test:/[abc]?[0-9]/", null));
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c83be6be/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
new file mode 100644
index 0000000..cb9bfe5
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Flexible/Standard/TestStandardQP.cs
@@ -0,0 +1,235 @@
+using Lucene.Net.Analysis;
+using Lucene.Net.QueryParsers.Flexible.Core;
+using Lucene.Net.QueryParsers.Flexible.Standard;
+using Lucene.Net.QueryParsers.Util;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using static Lucene.Net.Documents.DateTools;
+using static Lucene.Net.QueryParsers.Classic.QueryParserBase;
+using System.IO;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParsers.Flexible.Standard.Config;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParsers.Flexible.Standard
+{
+    /// <summary>
+    /// Tests QueryParser.
+    /// </summary>
+    public class TestStandardQP : QueryParserTestBase
+    {
+        /// <summary>
+        /// Builds a <see cref="StandardQueryParser"/> over the given analyzer —
+        /// or a simple MockAnalyzer when <paramref name="a"/> is null — with OR
+        /// as the default operator.
+        /// </summary>
+        public StandardQueryParser GetParser(Analyzer a)
+        {
+            Analyzer analyzer = a != null ? a : new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            StandardQueryParser parser = new StandardQueryParser(analyzer);
+            parser.SetDefaultOperator(Config.Operator.OR);
+            return parser;
+        }
+
+        /// <summary>Parses <paramref name="query"/> against the test base's default field.</summary>
+        public Query Parse(String query, StandardQueryParser qp)
+        {
+            object parsed = qp.Parse(query, DefaultField);
+            return (Query)parsed;
+        }
+
+
+        /// <summary>Exposes <see cref="GetParser(Analyzer)"/> through the shared test-base interface.</summary>
+        public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
+        {
+            ICommonQueryParserConfiguration config = GetParser(a);
+            return config;
+        }
+
+
+        /// <summary>Parses <paramref name="query"/> using the supplied parser configuration,
+        /// which must be a <see cref="StandardQueryParser"/>.</summary>
+        public override Query GetQuery(String query, ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC != null, "Parameter must not be null");
+            Debug.Assert((cqpC is StandardQueryParser), "Parameter must be instance of StandardQueryParser");
+            StandardQueryParser parser = (StandardQueryParser)cqpC;
+            return Parse(query, parser);
+        }
+
+
+        /// <summary>Parses <paramref name="query"/> with a freshly built parser over <paramref name="a"/>.</summary>
+        public override Query GetQuery(String query, Analyzer a)
+        {
+            StandardQueryParser parser = GetParser(a);
+            return Parse(query, parser);
+        }
+
+
+        /// <summary>The flexible parser reports parse failures as <see cref="QueryNodeException"/>.</summary>
+        public override bool IsQueryParserException(Exception exception)
+        {
+            bool isParserError = exception is QueryNodeException;
+            return isParserError;
+        }
+
+
+        /// <summary>Switches the parser's default boolean operator to OR.</summary>
+        public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC is StandardQueryParser);
+            ((StandardQueryParser)cqpC).SetDefaultOperator(Config.Operator.OR);
+        }
+
+
+        /// <summary>Switches the parser's default boolean operator to AND.</summary>
+        public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC is StandardQueryParser);
+            ((StandardQueryParser)cqpC).SetDefaultOperator(Config.Operator.AND);
+        }
+
+
+        /// <summary>
+        /// Not supported by <see cref="StandardQueryParser"/>; always throws so
+        /// tests relying on analyzed range terms know to skip.
+        /// </summary>
+        /// <exception cref="NotSupportedException">always</exception>
+        public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC,
+            bool value)
+        {
+            throw new NotSupportedException();
+        }
+
+
+        /// <summary>
+        /// Not supported by <see cref="StandardQueryParser"/>; always throws so
+        /// tests relying on auto-generated phrase queries know to skip.
+        /// </summary>
+        /// <exception cref="NotSupportedException">always</exception>
+        public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC,
+            bool value)
+        {
+            throw new NotSupportedException();
+        }
+
+
+        /// <summary>Registers a per-field date resolution on the parser's resolution map.</summary>
+        public override void SetDateResolution(ICommonQueryParserConfiguration cqpC,
+            string field, Resolution value)
+        {
+            Debug.Assert(cqpC is StandardQueryParser);
+            StandardQueryParser parser = (StandardQueryParser)cqpC;
+            parser.GetDateResolutionMap().Put(field, value);
+        }
+
+
+        /// <summary>
+        /// Whitespace-tokenizing analyzer used by <c>TestOperatorVsWhitespace</c>
+        /// (C# replacement for the Java anonymous Analyzer in the original test).
+        /// </summary>
+        internal class TestOperatorVsWhiteSpaceAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                return new TokenStreamComponents(tokenizer);
+            }
+        }
+        [Test]
+        public override void TestOperatorVsWhitespace()
+        {
+            // LUCENE-2566 is not implemented for StandardQueryParser
+            // TODO implement LUCENE-2566 and remove this (override)method
+            //
+            // The Java anonymous Analyzer from the original test is ported as
+            // TestOperatorVsWhiteSpaceAnalyzer above (dead commented-out Java
+            // source removed).
+            Analyzer a = new TestOperatorVsWhiteSpaceAnalyzer();
+            // Operators separated from their operand by whitespace are treated
+            // as attached to the following term.
+            AssertQueryEquals("a - b", a, "a -b");
+            AssertQueryEquals("a + b", a, "a +b");
+            AssertQueryEquals("a ! b", a, "a -b");
+        }
+
+        [Test]
+        public override void TestRangeWithPhrase()
+        {
+            // Overrides the base expectation: the flexible parser prints the
+            // one-term phrase bound as a plain term.
+            // StandardSyntaxParser does not differentiate between a term and a
+            // one-term-phrase in a range query.
+            // Is this an issue? Should StandardSyntaxParser mark the text as
+            // wasEscaped=true ?
+            AssertQueryEquals("[\\* TO \"*\"]", null, "[\\* TO *]");
+        }
+
+        [Test]
+        public override void TestEscapedVsQuestionMarkAsWildcard()
+        {
+            // The unescaped '?' remains a wildcard while the escaped
+            // punctuation before it is taken literally.
+            Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            AssertQueryEquals("a:b\\-?c", analyzer, "a:b-?c");
+            AssertQueryEquals("a:b\\+?c", analyzer, "a:b+?c");
+            AssertQueryEquals("a:b\\:?c", analyzer, "a:b:?c");
+            AssertQueryEquals("a:b\\\\?c", analyzer, "a:b\\?c");
+        }
+
+        [Test]
+        public override void TestEscapedWildcard()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            // NOTE(review): the first '?' was escaped in the input, so the port's
+            // own TODO says this expected pattern is wrong — confirm upstream.
+            WildcardQuery q = new WildcardQuery(new Term("field", "foo?ba?r"));//TODO not correct!!
+            assertEquals(q, GetQuery("foo\\?ba?r", qp));
+        }
+
+
+        [Test]
+        public override void TestCollatedRange()
+        {
+            // SetAnalyzeRangeTerms is unsupported by StandardQueryParser, so the
+            // base test cannot run; the expected outcome is the thrown exception.
+            // (Exception-less catch clause: the variable was unused, CS0168.)
+            try
+            {
+                SetAnalyzeRangeTerms(GetParser(null), true);
+                base.TestCollatedRange();
+            }
+            catch (NotSupportedException)
+            {
+                // expected
+            }
+        }
+
+        [Test]
+        public override void TestAutoGeneratePhraseQueriesOn()
+        {
+            // SetAutoGeneratePhraseQueries is unsupported by StandardQueryParser,
+            // so the base test cannot run; the expected outcome is the exception.
+            // (Exception-less catch clause: the variable was unused, CS0168.)
+            try
+            {
+                SetAutoGeneratePhraseQueries(GetParser(null), true);
+                base.TestAutoGeneratePhraseQueriesOn();
+            }
+            catch (NotSupportedException)
+            {
+                // expected
+            }
+        }
+
+        [Test]
+        public override void TestStarParsing()
+        {
+            // Intentionally empty: overriding disables the base-class test,
+            // which does not apply to StandardQueryParser.
+        }
+
+        [Test]
+        public override void TestDefaultOperator()
+        {
+            StandardQueryParser parser = GetParser(new MockAnalyzer(Random()));
+            // make sure OR is the default:
+            assertEquals(/*StandardQueryConfigHandler.*/Config.Operator.OR, parser.GetDefaultOperator());
+            // ...and that both setter helpers round-trip through the getter.
+            SetDefaultOperatorAND(parser);
+            assertEquals(/*StandardQueryConfigHandler.*/Config.Operator.AND, parser.GetDefaultOperator());
+            SetDefaultOperatorOR(parser);
+            assertEquals(/*StandardQueryConfigHandler.*/Config.Operator.OR, parser.GetDefaultOperator());
+        }
+
+
+        [Test]
+        public override void TestNewFieldQuery()
+        {
+            // Both parse calls below expect the same uncoordinated BooleanQuery;
+            // clause order matters for BooleanQuery equality, so it mirrors the
+            // analyzer's synonym expansion order.
+            /** ordinary behavior, synonyms form uncoordinated boolean query */
+            StandardQueryParser dumb = GetParser(new Analyzer1());
+            BooleanQuery expanded = new BooleanQuery(true);
+            expanded.Add(new TermQuery(new Term("field", "dogs")),
+                    BooleanClause.Occur.SHOULD);
+            expanded.Add(new TermQuery(new Term("field", "dog")),
+                BooleanClause.Occur.SHOULD);
+            assertEquals(expanded, dumb.Parse("\"dogs\"", "field"));
+            /** even with the phrase operator the behavior is the same */
+            assertEquals(expanded, dumb.Parse("dogs", "field"));
+
+            /**
+             * custom behavior, the synonyms are expanded, unless you use quote operator
+             */
+            //TODO test something like "SmartQueryParser()"
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c83be6be/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
index 65b69ec..4ba267d 100644
--- a/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
+++ b/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
@@ -50,6 +50,31 @@
     <Compile Include="Ext\ExtensionStub.cs" />
     <Compile Include="Ext\TestExtendableQueryParser.cs" />
     <Compile Include="Ext\TestExtensions.cs" />
+    <Compile Include="Flexible\Core\Builders\TestQueryTreeBuilder.cs" />
+    <Compile Include="Flexible\Core\Nodes\TestQueryNode.cs" />
+    <Compile Include="Flexible\Messages\MessagesTestBundle.cs" />
+    <Compile Include="Flexible\Messages\MessagesTestBundle.Designer.cs">
+      <AutoGen>True</AutoGen>
+      <DesignTime>True</DesignTime>
+      <DependentUpon>MessagesTestBundle.resx</DependentUpon>
+    </Compile>
+    <Compile Include="Flexible\Messages\TestNLS.cs" />
+    <Compile Include="Flexible\Precedence\TestPrecedenceQueryParser.cs" />
+    <Compile Include="Flexible\Spans\SpanOrQueryNodeBuilder.cs" />
+    <Compile Include="Flexible\Spans\SpansQueryConfigHandler.cs" />
+    <Compile Include="Flexible\Spans\SpansQueryTreeBuilder.cs" />
+    <Compile Include="Flexible\Spans\SpansValidatorQueryNodeProcessor.cs" />
+    <Compile Include="Flexible\Spans\SpanTermQueryNodeBuilder.cs" />
+    <Compile Include="Flexible\Spans\TestSpanQueryParser.cs" />
+    <Compile Include="Flexible\Spans\TestSpanQueryParserSimpleSample.cs" />
+    <Compile Include="Flexible\Spans\UniqueFieldAttribute.cs" />
+    <Compile Include="Flexible\Spans\UniqueFieldAttributeImpl.cs" />
+    <Compile Include="Flexible\Spans\UniqueFieldQueryNodeProcessor.cs" />
+    <Compile Include="Flexible\Standard\TestMultiAnalyzerQPHelper.cs" />
+    <Compile Include="Flexible\Standard\TestMultiFieldQPHelper.cs" />
+    <Compile Include="Flexible\Standard\TestNumericQueryParser.cs" />
+    <Compile Include="Flexible\Standard\TestQPHelper.cs" />
+    <Compile Include="Flexible\Standard\TestStandardQP.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
     <Compile Include="Classic\TestMultiAnalyzer.cs" />
     <Compile Include="Simple\TestSimpleQueryParser.cs" />
@@ -86,6 +111,16 @@
   <ItemGroup>
     <Service Include="{82A7F48D-3B50-4B1E-B82E-3ADA8210C358}" />
   </ItemGroup>
+  <ItemGroup>
+    <EmbeddedResource Include="Flexible\Messages\MessagesTestBundle.ja-JP.resx">
+      <DependentUpon>MessagesTestBundle.cs</DependentUpon>
+    </EmbeddedResource>
+    <EmbeddedResource Include="Flexible\Messages\MessagesTestBundle.resx">
+      <DependentUpon>MessagesTestBundle.cs</DependentUpon>
+      <Generator>ResXFileCodeGenerator</Generator>
+      <LastGenOutput>MessagesTestBundle.Designer.cs</LastGenOutput>
+    </EmbeddedResource>
+  </ItemGroup>
   <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
   <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
        Other similar extension points exist, see Microsoft.Common.targets.


Mime
View raw message