You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by sy...@apache.org on 2016/09/11 21:30:32 UTC

[01/50] [abbrv] lucenenet git commit: Ported tests for the QueryParser.Classic namespace and refactored QueryParserTestBase so the test runner will run all of the tests.

Repository: lucenenet
Updated Branches:
  refs/heads/master 36dc55f8c -> 1946bf82c


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
new file mode 100644
index 0000000..40aa777
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
@@ -0,0 +1,1499 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.QueryParser.Flexible.Standard;
+using Lucene.Net.Search;
+using Lucene.Net.Store;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using Lucene.Net.Util.Automaton;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.QueryParser.Util
+{
+    /// <summary>
+    /// In .NET the abstract members were moved to AbstractQueryParserTestBase
+    /// because the Visual Studio test runner does not find or run tests in 
+    /// abstract classes.
+    /// </summary>
+    [TestFixture]
+    public class QueryParserTestBase : AbstractQueryParserTestBase
+    {
+        public static Analyzer qpAnalyzer;
+
+        [TestFixtureSetUp]
+        public static void BeforeClass()
+        {
+            qpAnalyzer = new QPTestAnalyzer();
+        }
+
+        [TestFixtureTearDown]
+        public static void AfterClass()
+        {
+            qpAnalyzer = null;
+        }
+
+        public sealed class QPTestFilter : TokenFilter
+        {
+            ICharTermAttribute termAtt;
+            IOffsetAttribute offsetAtt;
+
+            /**
+             * Filter which discards the token 'stop' and which expands the
+             * token 'phrase' into 'phrase1 phrase2'
+             */
+            public QPTestFilter(TokenStream @in)
+                : base(@in)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                offsetAtt = AddAttribute<IOffsetAttribute>();
+            }
+
+            bool inPhrase = false;
+            int savedStart = 0, savedEnd = 0;
+
+            public override bool IncrementToken()
+            {
+                if (inPhrase)
+                {
+                    inPhrase = false;
+                    ClearAttributes();
+                    termAtt.Append("phrase2");
+                    offsetAtt.SetOffset(savedStart, savedEnd);
+                    return true;
+                }
+                else
+                    while (input.IncrementToken())
+                    {
+                        if (termAtt.toString().Equals("phrase"))
+                        {
+                            inPhrase = true;
+                            savedStart = offsetAtt.StartOffset();
+                            savedEnd = offsetAtt.EndOffset();
+                            termAtt.SetEmpty().Append("phrase1");
+                            offsetAtt.SetOffset(savedStart, savedEnd);
+                            return true;
+                        }
+                        else if (!termAtt.toString().equals("stop"))
+                            return true;
+                    }
+                return false;
+            }
+        }
+
+        public sealed class QPTestAnalyzer : Analyzer
+        {
+            /// <summary>
+            /// Filters MockTokenizer with StopFilter.
+            /// </summary>
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(tokenizer, new QPTestFilter(tokenizer));
+            }
+        }
+
+        private int originalMaxClauses;
+
+        private string defaultField = "field";
+        public string DefaultField { get { return defaultField; } set { defaultField = value; } }
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            originalMaxClauses = BooleanQuery.MaxClauseCount;
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
+        {
+            throw new NotImplementedException();
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
+        {
+            throw new NotImplementedException();
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
+        {
+            throw new NotImplementedException();
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
+        {
+            throw new NotImplementedException();
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
+        {
+            throw new NotImplementedException();
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
+        {
+            throw new NotImplementedException();
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
+        {
+            throw new NotImplementedException();
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override Query GetQuery(string query, Analyzer a)
+        {
+            throw new NotImplementedException();
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override bool IsQueryParserException(Exception exception)
+        {
+            throw new NotImplementedException();
+        }
+
+        public Query GetQuery(string query)
+        {
+            return GetQuery(query, (Analyzer)null);
+        }
+
+        public void AssertQueryEquals(string query, Analyzer a, string result)
+        {
+            Query q = GetQuery(query, a);
+            string s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s
+                     + "/, expecting /" + result + "/");
+            }
+        }
+
+        public void AssertQueryEquals(ICommonQueryParserConfiguration cqpC, string field, string query, string result)
+        {
+            Query q = GetQuery(query, cqpC);
+            string s = q.ToString(field);
+            if (!s.Equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s
+                     + "/, expecting /" + result + "/");
+            }
+        }
+
+        public void AssertEscapedQueryEquals(string query, Analyzer a, string result)
+        {
+            string escapedQuery = QueryParserBase.Escape(query);
+            if (!escapedQuery.Equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + escapedQuery
+                    + "/, expecting /" + result + "/");
+            }
+        }
+
+        public void AssertWildcardQueryEquals(string query, bool lowercase, string result, bool allowLeadingWildcard)
+        {
+            ICommonQueryParserConfiguration cqpC = GetParserConfig(null);
+            cqpC.LowercaseExpandedTerms = lowercase;
+            cqpC.AllowLeadingWildcard = allowLeadingWildcard;
+            Query q = GetQuery(query, cqpC);
+            string s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("WildcardQuery /" + query + "/ yielded /" + s
+                     + "/, expecting /" + result + "/");
+            }
+        }
+
+        public void AssertWildcardQueryEquals(string query, bool lowercase, string result)
+        {
+            AssertWildcardQueryEquals(query, lowercase, result, false);
+        }
+
+        public void AssertWildcardQueryEquals(string query, string result)
+        {
+            Query q = GetQuery(query);
+            string s = q.ToString("field");
+            if (!s.Equals(result))
+            {
+                fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
+                    + result + "/");
+            }
+        }
+
+        public Query GetQueryDOA(string query, Analyzer a)
+        {
+            if (a == null)
+                a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            ICommonQueryParserConfiguration qp = GetParserConfig(a);
+            SetDefaultOperatorAND(qp);
+            return GetQuery(query, qp);
+        }
+
+        public void AssertQueryEqualsDOA(string query, Analyzer a, string result)
+        {
+            Query q = GetQueryDOA(query, a);
+            string s = q.ToString("field");
+            if (!s.Equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s
+                     + "/, expecting /" + result + "/");
+            }
+        }
+
+        [Test]
+        public void TestCJK()
+        {
+            // Test Ideographic Space - As wide as a CJK character cell (fullwidth)
+            // used google to translate the word "term" to japanese -> \u7528\u8a9e
+            AssertQueryEquals("term\u3000term\u3000term", null, "term\u0020term\u0020term");
+            AssertQueryEquals("\u7528\u8a9e\u3000\u7528\u8a9e\u3000\u7528\u8a9e", null, "\u7528\u8a9e\u0020\u7528\u8a9e\u0020\u7528\u8a9e");
+        }
+
+        protected class SimpleCJKTokenizer : Tokenizer
+        {
+            private ICharTermAttribute termAtt;
+
+            public SimpleCJKTokenizer(System.IO.TextReader input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                int ch = input.Read();
+                if (ch < 0)
+                    return false;
+                ClearAttributes();
+                termAtt.SetEmpty().Append((char)ch);
+                return true;
+            }
+        }
+
+        private class SimpleCJKAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return new TokenStreamComponents(new SimpleCJKTokenizer(reader));
+            }
+        }
+
+        [Test]
+        public void TestCJKTerm()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, GetQuery("\u4e2d\u56fd", analyzer));
+        }
+
+        [Test]
+        public void TestCJKBoostedTerm()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            BooleanQuery expected = new BooleanQuery();
+            expected.Boost = (0.5f);
+            expected.Add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, GetQuery("\u4e2d\u56fd^0.5", analyzer));
+        }
+
+        [Test]
+        public void TestCJKPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Add(new Term("field", "\u4e2d"));
+            expected.Add(new Term("field", "\u56fd"));
+
+            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"", analyzer));
+        }
+
+        [Test]
+        public void TestCJKBoostedPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Boost = (0.5f);
+            expected.Add(new Term("field", "\u4e2d"));
+            expected.Add(new Term("field", "\u56fd"));
+
+            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"^0.5", analyzer));
+        }
+
+        [Test]
+        public void testCJKSloppyPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Slop = (3);
+            expected.Add(new Term("field", "\u4e2d"));
+            expected.Add(new Term("field", "\u56fd"));
+
+            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"~3", analyzer));
+        }
+
+        [Test]
+        public void TestAutoGeneratePhraseQueriesOn()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Add(new Term("field", "\u4e2d"));
+            expected.Add(new Term("field", "\u56fd"));
+            ICommonQueryParserConfiguration qp = GetParserConfig(analyzer);
+            SetAutoGeneratePhraseQueries(qp, true);
+            assertEquals(expected, GetQuery("\u4e2d\u56fd", qp));
+        }
+
+        [Test]
+        public void TestSimple()
+        {
+            AssertQueryEquals("term term term", null, "term term term");
+            AssertQueryEquals("türm term term", new MockAnalyzer(Random()), "türm term term");
+            AssertQueryEquals("ümlaut", new MockAnalyzer(Random()), "ümlaut");
+
+            // FIXME: enhance MockAnalyzer to be able to support this
+            // it must no longer extend CharTokenizer
+            //AssertQueryEquals("\"\"", new KeywordAnalyzer(), "");
+            //AssertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
+
+            AssertQueryEquals("a AND b", null, "+a +b");
+            AssertQueryEquals("(a AND b)", null, "+a +b");
+            AssertQueryEquals("c OR (a AND b)", null, "c (+a +b)");
+            AssertQueryEquals("a AND NOT b", null, "+a -b");
+            AssertQueryEquals("a AND -b", null, "+a -b");
+            AssertQueryEquals("a AND !b", null, "+a -b");
+            AssertQueryEquals("a && b", null, "+a +b");
+            //    AssertQueryEquals("a && ! b", null, "+a -b");
+
+            AssertQueryEquals("a OR b", null, "a b");
+            AssertQueryEquals("a || b", null, "a b");
+            AssertQueryEquals("a OR !b", null, "a -b");
+            //    AssertQueryEquals("a OR ! b", null, "a -b");
+            AssertQueryEquals("a OR -b", null, "a -b");
+
+            AssertQueryEquals("+term -term term", null, "+term -term term");
+            AssertQueryEquals("foo:term AND field:anotherTerm", null,
+                              "+foo:term +anotherterm");
+            AssertQueryEquals("term AND \"phrase phrase\"", null,
+                              "+term +\"phrase phrase\"");
+            AssertQueryEquals("\"hello there\"", null, "\"hello there\"");
+            assertTrue(GetQuery("a AND b") is BooleanQuery);
+            assertTrue(GetQuery("hello") is TermQuery);
+            assertTrue(GetQuery("\"hello there\"") is PhraseQuery);
+
+            AssertQueryEquals("germ term^2.0", null, "germ term^2.0");
+            AssertQueryEquals("(term)^2.0", null, "term^2.0");
+            AssertQueryEquals("(germ term)^2.0", null, "(germ term)^2.0");
+            AssertQueryEquals("term^2.0", null, "term^2.0");
+            AssertQueryEquals("term^2", null, "term^2.0");
+            AssertQueryEquals("\"germ term\"^2.0", null, "\"germ term\"^2.0");
+            AssertQueryEquals("\"term germ\"^2", null, "\"term germ\"^2.0");
+
+            AssertQueryEquals("(foo OR bar) AND (baz OR boo)", null,
+                              "+(foo bar) +(baz boo)");
+            AssertQueryEquals("((a OR b) AND NOT c) OR d", null,
+                              "(+(a b) -c) d");
+            AssertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null,
+                              "+(apple \"steve jobs\") -(foo bar baz)");
+            AssertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null,
+                              "+(title:dog title:cat) -author:\"bob dole\"");
+
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void TestDefaultOperator()
+        {
+            throw new NotImplementedException();
+        }
+
+        private class OperatorVsWhitespaceAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
+            }
+        }
+
+        [Test]
+        public void TestOperatorVsWhitespace()
+        { //LUCENE-2566
+            // +,-,! should be directly adjacent to operand (i.e. not separated by whitespace) to be treated as an operator
+            Analyzer a = new OperatorVsWhitespaceAnalyzer();
+            AssertQueryEquals("a - b", a, "a - b");
+            AssertQueryEquals("a + b", a, "a + b");
+            AssertQueryEquals("a ! b", a, "a ! b");
+        }
+
+        [Test]
+        public void TestPunct()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            AssertQueryEquals("a&b", a, "a&b");
+            AssertQueryEquals("a&&b", a, "a&&b");
+            AssertQueryEquals(".NET", a, ".NET");
+        }
+
+        [Test]
+        public void TestSlop()
+        {
+            AssertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
+            AssertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork");
+            AssertQueryEquals("\"term\"~2", null, "term");
+            AssertQueryEquals("\" \"~2 germ", null, "germ");
+            AssertQueryEquals("\"term germ\"~2^2", null, "\"term germ\"~2^2.0");
+        }
+
+        [Test]
+        public void TestNumber()
+        {
+            // The numbers go away because SimpleAnalyzer ignores them
+            AssertQueryEquals("3", null, "");
+            AssertQueryEquals("term 1.0 1 2", null, "term");
+            AssertQueryEquals("term term1 term2", null, "term term term");
+
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true);
+            AssertQueryEquals("3", a, "3");
+            AssertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
+            AssertQueryEquals("term term1 term2", a, "term term1 term2");
+        }
+
+        [Test]
+        public void TestWildcard()
+        {
+            AssertQueryEquals("term*", null, "term*");
+            AssertQueryEquals("term*^2", null, "term*^2.0");
+            AssertQueryEquals("term~", null, "term~2");
+            AssertQueryEquals("term~1", null, "term~1");
+            AssertQueryEquals("term~0.7", null, "term~1");
+            AssertQueryEquals("term~^3", null, "term~2^3.0");
+            AssertQueryEquals("term^3~", null, "term~2^3.0");
+            AssertQueryEquals("term*germ", null, "term*germ");
+            AssertQueryEquals("term*germ^3", null, "term*germ^3.0");
+
+            assertTrue(GetQuery("term*") is PrefixQuery);
+            assertTrue(GetQuery("term*^2") is PrefixQuery);
+            assertTrue(GetQuery("term~") is FuzzyQuery);
+            assertTrue(GetQuery("term~0.7") is FuzzyQuery);
+            FuzzyQuery fq = (FuzzyQuery)GetQuery("term~0.7");
+            assertEquals(1, fq.MaxEdits);
+            assertEquals(FuzzyQuery.DefaultPrefixLength, fq.PrefixLength);
+            fq = (FuzzyQuery)GetQuery("term~");
+            assertEquals(2, fq.MaxEdits);
+            assertEquals(FuzzyQuery.DefaultPrefixLength, fq.PrefixLength);
+
+            AssertParseException("term~1.1"); // value > 1, throws exception
+
+            assertTrue(GetQuery("term*germ") is WildcardQuery);
+
+            /* Tests to see that wild card terms are (or are not) properly
+               * lower-cased with proper parser configuration
+               */
+            // First prefix queries:
+            // by default, convert to lowercase:
+            AssertWildcardQueryEquals("Term*", true, "term*");
+            // explicitly set lowercase:
+            AssertWildcardQueryEquals("term*", true, "term*");
+            AssertWildcardQueryEquals("Term*", true, "term*");
+            AssertWildcardQueryEquals("TERM*", true, "term*");
+            // explicitly disable lowercase conversion:
+            AssertWildcardQueryEquals("term*", false, "term*");
+            AssertWildcardQueryEquals("Term*", false, "Term*");
+            AssertWildcardQueryEquals("TERM*", false, "TERM*");
+            // Then 'full' wildcard queries:
+            // by default, convert to lowercase:
+            AssertWildcardQueryEquals("Te?m", "te?m");
+            // explicitly set lowercase:
+            AssertWildcardQueryEquals("te?m", true, "te?m");
+            AssertWildcardQueryEquals("Te?m", true, "te?m");
+            AssertWildcardQueryEquals("TE?M", true, "te?m");
+            AssertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
+            // explicitly disable lowercase conversion:
+            AssertWildcardQueryEquals("te?m", false, "te?m");
+            AssertWildcardQueryEquals("Te?m", false, "Te?m");
+            AssertWildcardQueryEquals("TE?M", false, "TE?M");
+            AssertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
+            //  Fuzzy queries:
+            AssertWildcardQueryEquals("Term~", "term~2");
+            AssertWildcardQueryEquals("Term~", true, "term~2");
+            AssertWildcardQueryEquals("Term~", false, "Term~2");
+            //  Range queries:
+            AssertWildcardQueryEquals("[A TO C]", "[a TO c]");
+            AssertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
+            AssertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
+            // Test suffix queries: first disallow
+            try
+            {
+                AssertWildcardQueryEquals("*Term", true, "*term");
+            }
+            catch (Exception pe)
+            {
+                // expected exception
+                if (!IsQueryParserException(pe))
+                {
+                    fail();
+                }
+            }
+            try
+            {
+                AssertWildcardQueryEquals("?Term", true, "?term");
+                fail();
+            }
+            catch (Exception pe)
+            {
+                // expected exception
+                if (!IsQueryParserException(pe))
+                {
+                    fail();
+                }
+            }
+            // Test suffix queries: then allow
+            AssertWildcardQueryEquals("*Term", true, "*term", true);
+            AssertWildcardQueryEquals("?Term", true, "?term", true);
+        }
+
+        [Test]
+        public void TestLeadingWildcardType()
+        {
+            ICommonQueryParserConfiguration cqpC = GetParserConfig(null);
+            cqpC.AllowLeadingWildcard = (true);
+            assertEquals(typeof(WildcardQuery), GetQuery("t*erm*", cqpC).GetType());
+            assertEquals(typeof(WildcardQuery), GetQuery("?term*", cqpC).GetType());
+            assertEquals(typeof(WildcardQuery), GetQuery("*term*", cqpC).GetType());
+        }
+
+        [Test]
+        public void TestQPA()
+        {
+            AssertQueryEquals("term term^3.0 term", qpAnalyzer, "term term^3.0 term");
+            AssertQueryEquals("term stop^3.0 term", qpAnalyzer, "term term");
+
+            AssertQueryEquals("term term term", qpAnalyzer, "term term term");
+            AssertQueryEquals("term +stop term", qpAnalyzer, "term term");
+            AssertQueryEquals("term -stop term", qpAnalyzer, "term term");
+
+            AssertQueryEquals("drop AND (stop) AND roll", qpAnalyzer, "+drop +roll");
+            AssertQueryEquals("term +(stop) term", qpAnalyzer, "term term");
+            AssertQueryEquals("term -(stop) term", qpAnalyzer, "term term");
+
+            AssertQueryEquals("drop AND stop AND roll", qpAnalyzer, "+drop +roll");
+            AssertQueryEquals("term phrase term", qpAnalyzer,
+                              "term (phrase1 phrase2) term");
+            AssertQueryEquals("term AND NOT phrase term", qpAnalyzer,
+                              "+term -(phrase1 phrase2) term");
+            AssertQueryEquals("stop^3", qpAnalyzer, "");
+            AssertQueryEquals("stop", qpAnalyzer, "");
+            AssertQueryEquals("(stop)^3", qpAnalyzer, "");
+            AssertQueryEquals("((stop))^3", qpAnalyzer, "");
+            AssertQueryEquals("(stop^3)", qpAnalyzer, "");
+            AssertQueryEquals("((stop)^3)", qpAnalyzer, "");
+            AssertQueryEquals("(stop)", qpAnalyzer, "");
+            AssertQueryEquals("((stop))", qpAnalyzer, "");
+            assertTrue(GetQuery("term term term", qpAnalyzer) is BooleanQuery);
+            assertTrue(GetQuery("term +stop", qpAnalyzer) is TermQuery);
+
+            ICommonQueryParserConfiguration cqpc = GetParserConfig(qpAnalyzer);
+            SetDefaultOperatorAND(cqpc);
+            AssertQueryEquals(cqpc, "field", "term phrase term",
+                "+term +(+phrase1 +phrase2) +term");
+            AssertQueryEquals(cqpc, "field", "phrase",
+                "+phrase1 +phrase2");
+        }
+
+        [Test]
+        public void TestRange()
+        {
+            AssertQueryEquals("[ a TO z]", null, "[a TO z]");
+            AssertQueryEquals("[ a TO z}", null, "[a TO z}");
+            AssertQueryEquals("{ a TO z]", null, "{a TO z]");
+
+            assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)GetQuery("[ a TO z]")).GetRewriteMethod());
+
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true));
+
+            qp.MultiTermRewriteMethod=(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery)GetQuery("[ a TO z]", qp)).GetRewriteMethod());
+
+            // test open ranges
+            AssertQueryEquals("[ a TO * ]", null, "[a TO *]");
+            AssertQueryEquals("[ * TO z ]", null, "[* TO z]");
+            AssertQueryEquals("[ * TO * ]", null, "[* TO *]");
+
+            // mixing exclude and include bounds
+            AssertQueryEquals("{ a TO z ]", null, "{a TO z]");
+            AssertQueryEquals("[ a TO z }", null, "[a TO z}");
+            AssertQueryEquals("{ a TO * ]", null, "{a TO *]");
+            AssertQueryEquals("[ * TO z }", null, "[* TO z}");
+
+            AssertQueryEquals("[ a TO z ]", null, "[a TO z]");
+            AssertQueryEquals("{ a TO z}", null, "{a TO z}");
+            AssertQueryEquals("{ a TO z }", null, "{a TO z}");
+            AssertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
+            AssertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
+            AssertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
+            AssertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
+            AssertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
+
+            AssertQueryEquals("[* TO Z]", null, "[* TO z]");
+            AssertQueryEquals("[A TO *]", null, "[a TO *]");
+            AssertQueryEquals("[* TO *]", null, "[* TO *]");
+        }
+
+        [Test]
+        public void TestRangeWithPhrase()
+        {
+            AssertQueryEquals("[\\* TO \"*\"]", null, "[\\* TO \\*]");
+            AssertQueryEquals("[\"*\" TO *]", null, "[\\* TO *]");
+        }
+
+        private string EscapeDateString(string s)
+        {
+            if (s.IndexOf(" ") > -1)
+            {
+                return "\"" + s + "\"";
+            }
+            else
+            {
+                return s;
+            }
+        }
+
+        /// <summary>for testing DateTools support</summary>
+        private string GetDate(string s, DateTools.Resolution resolution)
+        {
+            // TODO: Is this the correct way to parse the string?
+            DateTime d = DateTime.Parse(s, System.Globalization.CultureInfo.InvariantCulture);
+            return GetDate(d, resolution);
+
+            //// we use the default Locale since LuceneTestCase randomizes it
+            //DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.getDefault());
+            //return GetDate(df.Parse(s), resolution);      
+        }
+
+        /// <summary>for testing DateTools support</summary>
+        private string GetDate(DateTime d, DateTools.Resolution resolution)
+        {
+            return DateTools.DateToString(d, resolution);
+        }
+
+        private string GetLocalizedDate(int year, int month, int day)
+        {
+            // TODO: Is this the right way to get the localized date?
+            DateTime d = new DateTime(year, month, day, 23, 59, 59, 999);
+            return d.ToString();
+
+            //// we use the default Locale/TZ since LuceneTestCase randomizes it
+            //DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.getDefault());
+            //Calendar calendar = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
+            //calendar.clear();
+            //calendar.set(year, month, day);
+            //calendar.set(Calendar.HOUR_OF_DAY, 23);
+            //calendar.set(Calendar.MINUTE, 59);
+            //calendar.set(Calendar.SECOND, 59);
+            //calendar.set(Calendar.MILLISECOND, 999);
+            //return df.format(calendar.getTime());
+        }
+
+        // TODO: Fix this test. The Java original builds localized start/end dates
+        // via DateFormat/Calendar and verifies field-specific date resolutions; the
+        // port is blocked on a .NET equivalent (see GetLocalizedDate above). The
+        // intended logic is preserved in the commented-out body below.
+        [Test]
+        public void TestDateRange()
+        {
+            // Fail loudly so the unported test is not silently reported as passing.
+            Assert.Fail("Test is not implemented");
+
+        //    string startDate = GetLocalizedDate(2002, 1, 1);
+        //    string endDate = GetLocalizedDate(2002, 1, 4);
+        //    // we use the default Locale/TZ since LuceneTestCase randomizes it
+        //    Calendar endDateExpected = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
+        //    endDateExpected.clear();
+        //    endDateExpected.set(2002, 1, 4, 23, 59, 59);
+        //    endDateExpected.set(Calendar.MILLISECOND, 999);
+        //    string defaultField = "default";
+        //    string monthField = "month";
+        //    string hourField = "hour";
+        //    Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+        //    CommonQueryParserConfiguration qp = GetParserConfig(a);
+
+        //    // set a field specific date resolution
+        //    SetDateResolution(qp, monthField, DateTools.Resolution.MONTH);
+
+        //    // set default date resolution to MILLISECOND
+        //    qp.SetDateResolution(DateTools.Resolution.MILLISECOND);
+
+        //    // set second field specific date resolution    
+        //    SetDateResolution(qp, hourField, DateTools.Resolution.HOUR);
+
+        //    // for this field no field specific date resolution has been set,
+        //    // so verify if the default resolution is used
+        //    AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
+        //            endDateExpected.getTime(), DateTools.Resolution.MILLISECOND);
+
+        //    // verify if field specific date resolutions are used for these two fields
+        //    AssertDateRangeQueryEquals(qp, monthField, startDate, endDate,
+        //            endDateExpected.getTime(), DateTools.Resolution.MONTH);
+
+        //    AssertDateRangeQueryEquals(qp, hourField, startDate, endDate,
+        //            endDateExpected.getTime(), DateTools.Resolution.HOUR);
+        }
+
+        /// <summary>
+        /// Asserts that both the inclusive ([start TO end]) and exclusive
+        /// ({start TO end}) date-range syntax on <paramref name="field"/> parse to the
+        /// expected range strings at the given <paramref name="resolution"/>.
+        /// Note the asymmetry (matching the Java original): only the inclusive form
+        /// expects <paramref name="endDateInclusive"/>; the exclusive form expects the
+        /// raw <paramref name="endDate"/> string.
+        /// </summary>
+        public void AssertDateRangeQueryEquals(ICommonQueryParserConfiguration cqpC, string field, string startDate, string endDate,
+            DateTime endDateInclusive, DateTools.Resolution resolution)
+        {
+            AssertQueryEquals(cqpC, field, field + ":[" + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "]",
+                       "[" + GetDate(startDate, resolution) + " TO " + GetDate(endDateInclusive, resolution) + "]");
+            AssertQueryEquals(cqpC, field, field + ":{" + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "}",
+                       "{" + GetDate(startDate, resolution) + " TO " + GetDate(endDate, resolution) + "}");
+        }
+
+        /// <summary>
+        /// Backslash-escaping of query-syntax metacharacters
+        /// (+ - ! ( ) { } [ ] ^ " ~ * ? : \) and \uXXXX unicode escapes;
+        /// malformed escape sequences must fail to parse.
+        /// </summary>
+        [Test]
+        public void TestEscaped()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+
+            /*AssertQueryEquals("\\[brackets", a, "\\[brackets");
+            AssertQueryEquals("\\[brackets", null, "brackets");
+            AssertQueryEquals("\\\\", a, "\\\\");
+            AssertQueryEquals("\\+blah", a, "\\+blah");
+            AssertQueryEquals("\\(blah", a, "\\(blah");
+
+            AssertQueryEquals("\\-blah", a, "\\-blah");
+            AssertQueryEquals("\\!blah", a, "\\!blah");
+            AssertQueryEquals("\\{blah", a, "\\{blah");
+            AssertQueryEquals("\\}blah", a, "\\}blah");
+            AssertQueryEquals("\\:blah", a, "\\:blah");
+            AssertQueryEquals("\\^blah", a, "\\^blah");
+            AssertQueryEquals("\\[blah", a, "\\[blah");
+            AssertQueryEquals("\\]blah", a, "\\]blah");
+            AssertQueryEquals("\\\"blah", a, "\\\"blah");
+            AssertQueryEquals("\\(blah", a, "\\(blah");
+            AssertQueryEquals("\\)blah", a, "\\)blah");
+            AssertQueryEquals("\\~blah", a, "\\~blah");
+            AssertQueryEquals("\\*blah", a, "\\*blah");
+            AssertQueryEquals("\\?blah", a, "\\?blah");
+            //AssertQueryEquals("foo \\&\\& bar", a, "foo \\&\\& bar");
+            //AssertQueryEquals("foo \\|| bar", a, "foo \\|| bar");
+            //AssertQueryEquals("foo \\AND bar", a, "foo \\AND bar");*/
+
+            // Escaping a non-special character is a no-op.
+            AssertQueryEquals("\\a", a, "a");
+
+            // Escapes in the field part of field:term.
+            AssertQueryEquals("a\\-b:c", a, "a-b:c");
+            AssertQueryEquals("a\\+b:c", a, "a+b:c");
+            AssertQueryEquals("a\\:b:c", a, "a:b:c");
+            AssertQueryEquals("a\\\\b:c", a, "a\\b:c");
+
+            // Escapes in the term part.
+            AssertQueryEquals("a:b\\-c", a, "a:b-c");
+            AssertQueryEquals("a:b\\+c", a, "a:b+c");
+            AssertQueryEquals("a:b\\:c", a, "a:b:c");
+            AssertQueryEquals("a:b\\\\c", a, "a:b\\c");
+
+            // Escapes combined with a trailing wildcard.
+            AssertQueryEquals("a:b\\-c*", a, "a:b-c*");
+            AssertQueryEquals("a:b\\+c*", a, "a:b+c*");
+            AssertQueryEquals("a:b\\:c*", a, "a:b:c*");
+
+            AssertQueryEquals("a:b\\\\c*", a, "a:b\\c*");
+
+            // Escapes combined with fuzzy (~ defaults to max edit distance 2).
+            AssertQueryEquals("a:b\\-c~", a, "a:b-c~2");
+            AssertQueryEquals("a:b\\+c~", a, "a:b+c~2");
+            AssertQueryEquals("a:b\\:c~", a, "a:b:c~2");
+            AssertQueryEquals("a:b\\\\c~", a, "a:b\\c~2");
+
+            // Escapes inside range queries.
+            AssertQueryEquals("[ a\\- TO a\\+ ]", null, "[a- TO a+]");
+            AssertQueryEquals("[ a\\: TO a\\~ ]", null, "[a: TO a~]");
+            AssertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]");
+
+            AssertQueryEquals("[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]", a,
+                              "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]");
+
+            AssertQueryEquals("a\\\\\\+b", a, "a\\+b");
+
+            // Escaped quotes inside and around phrases.
+            AssertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d");
+            AssertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\"");
+            AssertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\"");
+
+            AssertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt");
+
+            AssertParseException("XY\\"); // there must be a character after the escape char
+
+            // test unicode escaping
+            AssertQueryEquals("a\\u0062c", a, "abc");
+            AssertQueryEquals("XY\\u005a", a, "XYZ");
+            AssertQueryEquals("XY\\u005A", a, "XYZ");
+            AssertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\"");
+
+            AssertParseException("XY\\u005G");  // test non-hex character in escaped unicode sequence
+            AssertParseException("XY\\u005");   // test incomplete escaped unicode sequence
+
+            // Tests bug LUCENE-800
+            AssertQueryEquals("(item:\\\\ item:ABCD\\\\)", a, "item:\\ item:ABCD\\");
+            AssertParseException("(item:\\\\ item:ABCD\\\\))"); // unmatched closing parenthesis 
+            AssertQueryEquals("\\*", a, "*");
+            AssertQueryEquals("\\\\", a, "\\");  // escaped backslash
+
+            AssertParseException("\\"); // a backslash must always be escaped
+
+            // LUCENE-1189
+            AssertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b");
+        }
+
+        /// <summary>
+        /// An unescaped '?' next to an escaped metacharacter must still act as a
+        /// single-character wildcard, so the escape is preserved in the parsed term.
+        /// </summary>
+        [Test]
+        public void TestEscapedVsQuestionMarkAsWildcard()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            AssertQueryEquals("a:b\\-?c", a, "a:b\\-?c");
+            AssertQueryEquals("a:b\\+?c", a, "a:b\\+?c");
+            AssertQueryEquals("a:b\\:?c", a, "a:b\\:?c");
+
+            AssertQueryEquals("a:b\\\\?c", a, "a:b\\\\?c");
+        }
+
+        /// <summary>
+        /// Verifies the query-escaping helper: raw text containing query-syntax
+        /// metacharacters must round-trip to the expected backslash-escaped string
+        /// (presumably via QueryParser's Escape — the helper is defined elsewhere
+        /// in this class; TODO confirm).
+        /// </summary>
+        [Test]
+        public void TestQueryStringEscaping()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+
+            AssertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
+            AssertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
+            AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+            AssertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c");
+
+            AssertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c");
+            AssertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c");
+            AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+            AssertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c");
+
+            AssertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*");
+            AssertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*");
+            AssertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*");
+
+            AssertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*");
+
+            AssertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c");
+            AssertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c");
+            AssertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c");
+
+            AssertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c");
+
+            AssertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~");
+            AssertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~");
+            AssertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~");
+            AssertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~");
+
+            AssertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]");
+            AssertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]");
+            AssertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]");
+
+            // LUCENE-881
+            AssertEscapedQueryEquals("|| abc ||", a, "\\|\\| abc \\|\\|");
+            AssertEscapedQueryEquals("&& abc &&", a, "\\&\\& abc \\&\\&");
+        }
+
+        /// <summary>
+        /// The default-operator-AND parser must treat tabs, newlines and carriage
+        /// returns between clauses exactly like ordinary spaces.
+        /// </summary>
+        [Test]
+        public void TestTabNewlineCarriageReturn()
+        {
+            const string expected = "+weltbank +worlbank";
+
+            // Baseline: plain space separator.
+            AssertQueryEqualsDOA("+weltbank +worlbank", null, expected);
+
+            // Each whitespace variant, in each of three surrounding-space layouts.
+            foreach (string ws in new[] { "\n", "\r", "\r\n", "\t" })
+            {
+                AssertQueryEqualsDOA("+weltbank" + ws + "+worlbank", null, expected);
+                AssertQueryEqualsDOA("weltbank " + ws + "+worlbank", null, expected);
+                AssertQueryEqualsDOA("weltbank " + ws + " +worlbank", null, expected);
+            }
+
+            // Mixed whitespace between the clauses.
+            AssertQueryEqualsDOA("weltbank \r \n +worlbank", null, expected);
+        }
+
+        /// <summary>
+        /// Default-operator-AND parsing: terms without an explicit operator become
+        /// required (+); an explicit prohibit (-) is preserved.
+        /// </summary>
+        [Test]
+        public void TestSimpleDAO()
+        {
+            // All of these normalize to three required terms.
+            string[] allRequired =
+            {
+                "term term term",
+                "term +term term",
+                "term term +term",
+                "term +term +term",
+            };
+            foreach (string query in allRequired)
+            {
+                AssertQueryEqualsDOA(query, null, "+term +term +term");
+            }
+
+            // A leading prohibited term stays prohibited.
+            AssertQueryEqualsDOA("-term term term", null, "-term +term +term");
+        }
+
+        /// <summary>
+        /// Boost syntax (^) parses cleanly and an explicit boost survives parsing;
+        /// a boosted query consisting only of a stop word collapses to an empty
+        /// query with the default boost of 1.0.
+        /// </summary>
+        [Test]
+        public void TestBoost()
+        {
+            // "on" is the single stop word for the first analyzer.
+            CharacterRunAutomaton stopWords = new CharacterRunAutomaton(BasicAutomata.MakeString("on"));
+            Analyzer oneStopAnalyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopWords);
+            ICommonQueryParserConfiguration qp = GetParserConfig(oneStopAnalyzer);
+            Query q = GetQuery("on^1.0", qp);
+            assertNotNull(q);
+            q = GetQuery("\"hello\"^2.0", qp);
+            assertNotNull(q);
+            assertEquals(q.Boost, (float)2.0, (float)0.5);
+            q = GetQuery("hello^2.0", qp);
+            assertNotNull(q);
+            assertEquals(q.Boost, (float)2.0, (float)0.5);
+            q = GetQuery("\"on\"^1.0", qp);
+            assertNotNull(q);
+
+            Analyzer a2 = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+            ICommonQueryParserConfiguration qp2 = GetParserConfig(a2);
+            q = GetQuery("the^3", qp2);
+            // "the" is a stop word so the result is an empty query:
+            assertNotNull(q);
+            assertEquals("", q.toString());
+            assertEquals(1.0f, q.Boost, 0.01f);
+        }
+
+        /// <summary>
+        /// Asserts that parsing <paramref name="queryString"/> with the default
+        /// parser throws this parser's parse-failure exception
+        /// (as identified by <see cref="IsQueryParserException"/>).
+        /// </summary>
+        public void AssertParseException(string queryString)
+        {
+            try
+            {
+                GetQuery(queryString);
+            }
+            catch (Exception expected)
+            {
+                // Only the parser's own exception type counts as success; any other
+                // exception falls through to the failure below.
+                if (IsQueryParserException(expected))
+                {
+                    return;
+                }
+            }
+            fail("ParseException expected, not thrown");
+        }
+
+        /// <summary>
+        /// Asserts that parsing <paramref name="queryString"/> with a parser built
+        /// from analyzer <paramref name="a"/> throws this parser's parse-failure
+        /// exception (as identified by <see cref="IsQueryParserException"/>).
+        /// </summary>
+        public void AssertParseException(string queryString, Analyzer a)
+        {
+            try
+            {
+                GetQuery(queryString, a);
+            }
+            catch (Exception expected)
+            {
+                // Only the parser's own exception type counts as success.
+                if (IsQueryParserException(expected))
+                {
+                    return;
+                }
+            }
+            fail("ParseException expected, not thrown");
+        }
+
+        /// <summary>
+        /// A sampling of malformed query strings (unterminated phrase, unbalanced
+        /// parentheses, doubled colons/boosts) must all fail to parse.
+        /// </summary>
+        [Test]
+        public void TestException()
+        {
+            AssertParseException("\"some phrase");
+            AssertParseException("(foo bar");
+            AssertParseException("foo bar))");
+            AssertParseException("field:term:with:colon some more terms");
+            AssertParseException("(sub query)^5.0^2.0 plus more");
+            AssertParseException("secret AND illegal) AND access:confidential");
+        }
+
+        /// <summary>
+        /// With MaxClauseCount lowered to 2, a three-clause boolean query must fail
+        /// to parse. TearDown restores the original MaxClauseCount afterwards.
+        /// </summary>
+        [Test]
+        public void TestBooleanQuery()
+        {
+            BooleanQuery.MaxClauseCount = (2);
+            Analyzer purWhitespaceAnalyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            AssertParseException("one two three", purWhitespaceAnalyzer);
+        }
+
+        /// <summary>
+        /// "A AND B OR C AND D" must parse to the same query as "+A +B +C +D"
+        /// (the classic parser does not give AND precedence over OR).
+        /// </summary>
+        [Test]
+        public void TestPrecedence()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            Query query1 = GetQuery("A AND B OR C AND D", qp);
+            Query query2 = GetQuery("+A +B +C +D", qp);
+            assertEquals(query1, query2);
+        }
+
+        // LUCENETODO: convert this from DateField to DateUtil
+        //  public void testLocalDateFormat() throws IOException, ParseException {
+        //    Directory ramDir = newDirectory();
+        //    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
+        //    addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
+        //    addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
+        //    iw.close();
+        //    IndexSearcher is = new IndexSearcher(ramDir, true);
+        //    assertHits(1, "[12/1/2005 TO 12/3/2005]", is);
+        //    assertHits(2, "[12/1/2005 TO 12/4/2005]", is);
+        //    assertHits(1, "[12/3/2005 TO 12/4/2005]", is);
+        //    assertHits(1, "{12/1/2005 TO 12/3/2005}", is);
+        //    assertHits(1, "{12/1/2005 TO 12/4/2005}", is);
+        //    assertHits(0, "{12/3/2005 TO 12/4/2005}", is);
+        //    is.close();
+        //    ramDir.close();
+        //  }
+        //
+        //  private void addDateDoc(String content, int year, int month,
+        //                          int day, int hour, int minute, int second, IndexWriter iw) throws IOException {
+        //    Document d = new Document();
+        //    d.add(newField("f", content, Field.Store.YES, Field.Index.ANALYZED));
+        //    Calendar cal = Calendar.getInstance(Locale.ENGLISH);
+        //    cal.set(year, month - 1, day, hour, minute, second);
+        //    d.add(newField("date", DateField.dateToString(cal.getTime()), Field.Store.YES, Field.Index.NOT_ANALYZED));
+        //    iw.addDocument(d);
+        //  }
+
+        // Moved to AbstractQueryParserTestBase: the concrete parser-specific test
+        // subclass overrides this; the base implementation must never run.
+        public override void TestStarParsing()
+        {
+            throw new NotImplementedException();
+        }
+
+        /// <summary>
+        /// An escaped '?' stays literal while an unescaped '?' in the same term
+        /// still produces a WildcardQuery.
+        /// </summary>
+        [Test]
+        public void TestEscapedWildcard()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
+            assertEquals(q, GetQuery("foo\\?ba?r", qp));
+        }
+
+        /// <summary>
+        /// Regular-expression syntax (/regex/): lowercasing of expanded terms,
+        /// boosts, rewrite-method propagation, escaped slashes, and mixing regexp
+        /// clauses with term/phrase clauses in a boolean query.
+        /// </summary>
+        [Test]
+        public void TestRegexps()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            RegexpQuery q = new RegexpQuery(new Term("field", "[a-z][123]"));
+            assertEquals(q, GetQuery("/[a-z][123]/", qp));
+            // With lowercasing enabled, /[A-Z][123]/ becomes the same query.
+            qp.LowercaseExpandedTerms = (true);
+            assertEquals(q, GetQuery("/[A-Z][123]/", qp));
+            q.Boost = (0.5f);
+            assertEquals(q, GetQuery("/[A-Z][123]/^0.5", qp));
+            // The parser's configured rewrite method must be copied onto the query.
+            qp.MultiTermRewriteMethod=(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            q.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            assertTrue(GetQuery("/[A-Z][123]/^0.5", qp) is RegexpQuery);
+            assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((RegexpQuery)GetQuery("/[A-Z][123]/^0.5", qp)).GetRewriteMethod());
+            assertEquals(q, GetQuery("/[A-Z][123]/^0.5", qp));
+            qp.MultiTermRewriteMethod=(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT);
+
+            // An escaped slash inside the regexp does not terminate it.
+            Query escaped = new RegexpQuery(new Term("field", "[a-z]\\/[123]"));
+            assertEquals(escaped, GetQuery("/[a-z]\\/[123]/", qp));
+            Query escaped2 = new RegexpQuery(new Term("field", "[a-z]\\*[123]"));
+            assertEquals(escaped2, GetQuery("/[a-z]\\*[123]/", qp));
+
+            BooleanQuery complex = new BooleanQuery();
+            complex.Add(new RegexpQuery(new Term("field", "[a-z]\\/[123]")), BooleanClause.Occur.MUST);
+            complex.Add(new TermQuery(new Term("path", "/etc/init.d/")), BooleanClause.Occur.MUST);
+            complex.Add(new TermQuery(new Term("field", "/etc/init[.]d/lucene/")), BooleanClause.Occur.SHOULD);
+            assertEquals(complex, GetQuery("/[a-z]\\/[123]/ AND path:\"/etc/init.d/\" OR \"/etc\\/init\\[.\\]d/lucene/\" ", qp));
+
+            // Regexps with and without an explicit field prefix.
+            Query re = new RegexpQuery(new Term("field", "http.*"));
+            assertEquals(re, GetQuery("field:/http.*/", qp));
+            assertEquals(re, GetQuery("/http.*/", qp));
+
+            // '~' inside /.../ is part of the regexp, not fuzzy syntax.
+            re = new RegexpQuery(new Term("field", "http~0.5"));
+            assertEquals(re, GetQuery("field:/http~0.5/", qp));
+            assertEquals(re, GetQuery("/http~0.5/", qp));
+
+            re = new RegexpQuery(new Term("field", "boo"));
+            assertEquals(re, GetQuery("field:/boo/", qp));
+            assertEquals(re, GetQuery("/boo/", qp));
+
+            // Quoted or escaped slashes yield a plain TermQuery, not a regexp.
+            assertEquals(new TermQuery(new Term("field", "/boo/")), GetQuery("\"/boo/\"", qp));
+            assertEquals(new TermQuery(new Term("field", "/boo/")), GetQuery("\\/boo\\/", qp));
+
+            BooleanQuery two = new BooleanQuery();
+            two.Add(new RegexpQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
+            two.Add(new RegexpQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
+            assertEquals(two, GetQuery("field:/foo/ field:/bar/", qp));
+            assertEquals(two, GetQuery("/foo/ /bar/", qp));
+        }
+
+        /// <summary>
+        /// With "the" and "foo" as stop words: a query of only stop words yields an
+        /// empty BooleanQuery; a mixed query drops the stop-word clause; and stop
+        /// words inside a larger boolean structure are removed without collapsing it.
+        /// </summary>
+        [Test]
+        public void TestStopwords()
+        {
+            CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").ToAutomaton());
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopSet));
+            Query result = GetQuery("field:the OR field:foo", qp);
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a BooleanQuery", result is BooleanQuery);
+            assertTrue(((BooleanQuery)result).Clauses.Length + " does not equal: " + 0, ((BooleanQuery)result).Clauses.Length == 0);
+            result = GetQuery("field:woo OR field:the", qp);
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a TermQuery", result is TermQuery);
+            result = GetQuery("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", qp);
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a BooleanQuery", result is BooleanQuery);
+            if (VERBOSE) Console.WriteLine("Result: " + result);
+            assertTrue(((BooleanQuery)result).Clauses.Length + " does not equal: " + 2, ((BooleanQuery)result).Clauses.Length == 2);
+        }
+
+        /// <summary>
+        /// With position increments enabled, stop words removed from a phrase leave
+        /// positional gaps: the surviving phrase terms keep their original positions.
+        /// </summary>
+        [Test]
+        public void TestPositionIncrement()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET));
+            qp.EnablePositionIncrements = (true);
+            String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
+            //               0         2                      5           7  8
+            int[] expectedPositions = { 1, 3, 4, 6, 9 };
+            PhraseQuery pq = (PhraseQuery)GetQuery(qtxt, qp);
+            //System.out.println("Query text: "+qtxt);
+            //System.out.println("Result: "+pq);
+            Term[] t = pq.Terms;
+            int[] pos = pq.Positions;
+            for (int i = 0; i < t.Length; i++)
+            {
+                //System.out.println(i+". "+t[i]+"  pos: "+pos[i]);
+                assertEquals("term " + i + " = " + t[i] + " has wrong term-position!", expectedPositions[i], pos[i]);
+            }
+        }
+
+        /// <summary>
+        /// "*:*" (bare or parenthesized) parses to MatchAllDocsQuery, including when
+        /// used as required/prohibited clauses inside a boolean query.
+        /// </summary>
+        [Test]
+        public void TestMatchAllDocs()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            assertEquals(new MatchAllDocsQuery(), GetQuery("*:*", qp));
+            assertEquals(new MatchAllDocsQuery(), GetQuery("(*:*)", qp));
+            BooleanQuery bq = (BooleanQuery)GetQuery("+*:* -*:*", qp);
+            assertTrue(bq.Clauses[0].Query is MatchAllDocsQuery);
+            assertTrue(bq.Clauses[1].Query is MatchAllDocsQuery);
+        }
+
+        /// <summary>
+        /// Parses <paramref name="query"/> against the "date" field using the "en"
+        /// culture and asserts the searcher returns <paramref name="expected"/> hits.
+        /// </summary>
+        private void AssertHits(int expected, String query, IndexSearcher @is)
+        {
+            string oldDefaultField = DefaultField;
+            DefaultField = "date";
+            try
+            {
+                ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+                qp.Locale = System.Globalization.CultureInfo.GetCultureInfo("en");
+                Query q = GetQuery(query, qp);
+                ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs;
+                assertEquals(expected, hits.Length);
+            }
+            finally
+            {
+                // Restore the shared default field even if parsing or the assert
+                // throws, so later tests are not affected by a leaked value.
+                DefaultField = oldDefaultField;
+            }
+        }
+
+        /// <summary>
+        /// Restores BooleanQuery.MaxClauseCount (tests such as TestBooleanQuery
+        /// lower it) before running the base-class teardown.
+        /// </summary>
+        public override void TearDown()
+        {
+            BooleanQuery.MaxClauseCount = originalMaxClauses;
+            base.TearDown();
+        }
+
+        // LUCENE-2002: make sure defaults for StandardAnalyzer's
+        // enableStopPositionIncr & QueryParser's enablePosIncr
+        // "match"
+        [Test]
+        public void TestPositionIncrements()
+        {
+            using (Directory dir = NewDirectory())
+            {
+                Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+                using (IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, a)))
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("field", "the wizard of ozzy", Field.Store.NO));
+                    w.AddDocument(doc);
+                    using (IndexReader r = DirectoryReader.Open(w, true))
+                    {
+                        IndexSearcher s = NewSearcher(r);
+
+                        // "of" is a stop word; the indexed gap and the query's gap
+                        // must line up for the phrase to match the stored document.
+                        Query q = GetQuery("\"wizard of ozzy\"", a);
+                        assertEquals(1, s.Search(q, 1).TotalHits);
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// Adds synonym of "dog" for "dogs": whenever the token "dogs" is emitted,
+        /// the next call injects "dog" at the same position (increment 0).
+        /// </summary>
+        protected class MockSynonymFilter : TokenFilter
+        {
+            ICharTermAttribute termAtt;
+            IPositionIncrementAttribute posIncAtt;
+            bool addSynonym = false; // true => emit the synonym before pulling the next input token
+
+            public MockSynonymFilter(TokenStream input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                posIncAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                if (addSynonym)
+                { // inject our synonym
+                    ClearAttributes();
+                    termAtt.SetEmpty().Append("dog");
+                    posIncAtt.PositionIncrement = (0);
+                    addSynonym = false;
+                    return true;
+                }
+
+                if (input.IncrementToken())
+                {
+                    // Remember to inject the synonym on the next call.
+                    addSynonym = termAtt.toString().equals("dogs");
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        }
+
+        /// <summary>
+        /// whitespace+lowercase analyzer WITH synonyms (wraps the tokenizer in
+        /// <see cref="MockSynonymFilter"/>; the original comment said "without",
+        /// which contradicted the code).
+        /// </summary>
+        protected class Analyzer1 : Analyzer
+        {
+            public Analyzer1()
+            { }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
+            }
+        }
+
+        /// <summary>
+        /// whitespace+lowercase analyzer without synonyms (plain MockTokenizer,
+        /// no extra filters).
+        /// </summary>
+        protected class Analyzer2 : Analyzer
+        {
+            public Analyzer2()
+            { }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true));
+            }
+        }
+
+        // Moved to AbstractQueryParserTestBase: the concrete parser-specific test
+        // subclass overrides this; the base implementation must never run.
+        public override void TestNewFieldQuery()
+        {
+            throw new NotImplementedException();
+        }
+
+        /// <summary>
+        /// Mock collation analyzer: indexes terms as "collated" + term
+        /// (prepends the literal prefix to every token).
+        /// </summary>
+        private class MockCollationFilter : TokenFilter
+        {
+            private ICharTermAttribute termAtt;
+
+            public MockCollationFilter(TokenStream input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (input.IncrementToken())
+                {
+                    // Rewrite the term in place: term -> "collated" + term.
+                    string term = termAtt.toString();
+                    termAtt.SetEmpty().Append("collated").Append(term);
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        }
+
+        /// <summary>
+        /// Whitespace analyzer whose output passes through
+        /// <see cref="MockCollationFilter"/> ("collated" prefix on every term).
+        /// </summary>
+        private class MockCollationAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(tokenizer, new MockCollationFilter(tokenizer));
+            }
+        }
+
+        /// <summary>
+        /// With range-term analysis enabled, range endpoints are run through the
+        /// analyzer — here gaining the "collated" prefix from MockCollationFilter.
+        /// </summary>
+        [Test]
+        public void TestCollatedRange()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockCollationAnalyzer());
+            SetAnalyzeRangeTerms(qp, true);
+            Query expected = TermRangeQuery.NewStringRange(DefaultField, "collatedabc", "collateddef", true, true);
+            Query actual = GetQuery("[abc TO def]", qp);
+            assertEquals(expected, actual);
+        }
+
+        /// <summary>
+        /// A fuzzy suffix with an integer (~2) is interpreted as an exact
+        /// maximum-edit-distance, not a similarity ratio.
+        /// </summary>
+        [Test]
+        public void TestDistanceAsEditsParsing()
+        {
+            FuzzyQuery q = (FuzzyQuery)GetQuery("foobar~2", new MockAnalyzer(Random()));
+            assertEquals(2, q.MaxEdits);
+        }
+
+        /// <summary>
+        /// PhraseQuery.ToString renders '?' for positional gaps left by removed
+        /// stop words when position increments are enabled.
+        /// </summary>
+        [Test]
+        public void TestPhraseQueryToString()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+            ICommonQueryParserConfiguration qp = GetParserConfig(analyzer);
+            qp.EnablePositionIncrements = (true);
+            PhraseQuery q = (PhraseQuery)GetQuery("\"this hi this is a test is\"", qp);
+            assertEquals("field:\"? hi ? ? ? test\"", q.toString());
+        }
+
+        /// <summary>
+        /// Terms whose only wildcard is a trailing '*' must parse to PrefixQuery;
+        /// any other wildcard placement must parse to WildcardQuery (leading
+        /// wildcards are explicitly enabled for this test).
+        /// </summary>
+        [Test]
+        public void TestParseWildcardAndPhraseQueries()
+        {
+            string field = "content";
+            string oldDefaultField = DefaultField;
+            DefaultField = (field);
+            try
+            {
+                ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random()));
+                qp.AllowLeadingWildcard = (true);
+
+                string[][] prefixQueries = new string[3][] {
+                    new string[] {"a*", "ab*", "abc*",},
+                    new string[] {"h*", "hi*", "hij*", "\\\\7*"},
+                    new string[] {"o*", "op*", "opq*", "\\\\\\\\*"},
+                };
+
+                string[][] wildcardQueries = new string[3][] {
+                    new string[] {"*a*", "*ab*", "*abc**", "ab*e*", "*g?", "*f?1", "abc**"},
+                    new string[] {"*h*", "*hi*", "*hij**", "hi*k*", "*n?", "*m?1", "hij**"},
+                    new string[] {"*o*", "*op*", "*opq**", "op*q*", "*u?", "*t?1", "opq**"},
+                };
+
+                // test queries that must be prefix queries
+                foreach (string[] group in prefixQueries)
+                {
+                    foreach (string queryString in group)
+                    {
+                        Query q = GetQuery(queryString, qp);
+                        assertEquals(typeof(PrefixQuery), q.GetType());
+                    }
+                }
+
+                // test queries that must be wildcard queries
+                foreach (string[] group in wildcardQueries)
+                {
+                    foreach (string qtxt in group)
+                    {
+                        Query q = GetQuery(qtxt, qp);
+                        assertEquals(typeof(WildcardQuery), q.GetType());
+                    }
+                }
+            }
+            finally
+            {
+                // Restore the shared default field even if an assertion fails,
+                // so later tests are not affected by a leaked value.
+                DefaultField = (oldDefaultField);
+            }
+        }
+
+        /// <summary>
+        /// With position increments enabled, the stop word "stop" removed from a
+        /// phrase leaves a positional gap, so the second term sits at offset 2.
+        /// </summary>
+        [Test]
+        public void TestPhraseQueryPositionIncrements()
+        {
+            CharacterRunAutomaton stopStopList =
+            new CharacterRunAutomaton(new RegExp("[sS][tT][oO][pP]").ToAutomaton());
+
+            // Note: the original code built a parser configuration twice and
+            // immediately discarded the first; a single configuration suffices.
+            ICommonQueryParserConfiguration qp = GetParserConfig(
+                                 new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false, stopStopList));
+            qp.EnablePositionIncrements = (true);
+
+            PhraseQuery phraseQuery = new PhraseQuery();
+            phraseQuery.Add(new Term("field", "1"));
+            phraseQuery.Add(new Term("field", "2"), 2);
+            assertEquals(phraseQuery, GetQuery("\"1 stop 2\"", qp));
+        }
+
+        /// <summary>
+        /// MatchAllDocsQuery round-trips through its own ToString and the parser,
+        /// with and without a non-default boost.
+        /// </summary>
+        [Test]
+        public void TestMatchAllQueryParsing()
+        {
+            // test simple parsing of MatchAllDocsQuery
+            string oldDefaultField = DefaultField;
+            DefaultField = ("key");
+            try
+            {
+                ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random()));
+                assertEquals(new MatchAllDocsQuery(), GetQuery(new MatchAllDocsQuery().toString(), qp));
+
+                // test parsing with non-default boost
+                MatchAllDocsQuery query = new MatchAllDocsQuery();
+                query.Boost = (2.3f);
+                assertEquals(query, GetQuery(query.toString(), qp));
+            }
+            finally
+            {
+                // Restore the shared default field even if an assertion fails,
+                // so later tests are not affected by a leaked value.
+                DefaultField = (oldDefaultField);
+            }
+        }
+
+        /// <summary>
+        /// Nested AND groups with open-ended ranges parse into the expected
+        /// two-level BooleanQuery structure (both inner and outer clauses MUST).
+        /// </summary>
+        [Test]
+        public void TestNestedAndClausesFoo()
+        {
+            string query = "(field1:[1 TO *] AND field1:[* TO 2]) AND field2:(z)";
+            BooleanQuery q = new BooleanQuery();
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(TermRangeQuery.NewStringRange("field1", "1", null, true, true), BooleanClause.Occur.MUST);
+            bq.Add(TermRangeQuery.NewStringRange("field1", null, "2", true, true), BooleanClause.Occur.MUST);
+            q.Add(bq, BooleanClause.Occur.MUST);
+            q.Add(new TermQuery(new Term("field2", "z")), BooleanClause.Occur.MUST);
+            assertEquals(q, GetQuery(query, new MockAnalyzer(Random())));
+        }
+    }
+
+
+    /// <summary>
+    /// This class was added in .NET because the Visual Studio test runner
+    /// does not detect tests in abstract classes. Therefore, the abstract members
+    /// of QueryParserTestBase were moved here so the QueryParserTestBase class
+    /// could be made concrete.
+    /// </summary>
+    public abstract class AbstractQueryParserTestBase : LuceneTestCase
+    {
+        // Tests whose behavior differs per parser implementation; concrete
+        // subclasses supply the real test bodies.
+        public abstract void TestStarParsing();
+
+        public abstract void TestNewFieldQuery();
+
+        public abstract void TestDefaultOperator();
+
+        /// <summary>Creates the parser configuration under test for the given analyzer.</summary>
+        public abstract ICommonQueryParserConfiguration GetParserConfig(Analyzer a);
+
+        /// <summary>Sets the parser's default boolean operator to OR.</summary>
+        public abstract void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC);
+
+        /// <summary>Sets the parser's default boolean operator to AND.</summary>
+        public abstract void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC);
+
+        /// <summary>Enables or disables analysis of range-query endpoints.</summary>
+        public abstract void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value);
+
+        /// <summary>Enables or disables automatic phrase-query generation.</summary>
+        public abstract void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value);
+
+        /// <summary>Sets a field-specific date resolution on the configuration.</summary>
+        public abstract void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value);
+
+        /// <summary>Parses <paramref name="query"/> using the given configuration.</summary>
+        public abstract Query GetQuery(string query, ICommonQueryParserConfiguration cqpC);
+
+        /// <summary>Parses <paramref name="query"/> with a parser built from analyzer <paramref name="a"/>.</summary>
+        public abstract Query GetQuery(string query, Analyzer a);
+
+        /// <summary>Returns true if <paramref name="exception"/> is this parser's parse-failure type.</summary>
+        public abstract bool IsQueryParserException(Exception exception);
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.Tests.QueryParser/packages.config
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/packages.config b/Lucene.Net.Tests.QueryParser/packages.config
new file mode 100644
index 0000000..139d513
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/packages.config
@@ -0,0 +1,4 @@
+\ufeff<?xml version="1.0" encoding="utf-8"?>
+<packages>
+  <package id="NUnit" version="2.6.3" targetFramework="net451" />
+</packages>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.sln
----------------------------------------------------------------------
diff --git a/Lucene.Net.sln b/Lucene.Net.sln
index 17416b6..c6031b4 100644
--- a/Lucene.Net.sln
+++ b/Lucene.Net.sln
@@ -48,6 +48,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.Codecs", "
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.QueryParser", "Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj", "{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.QueryParser", "Lucene.Net.Tests.QueryParser\Lucene.Net.Tests.QueryParser.csproj", "{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -257,6 +259,16 @@ Global
 		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
 		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Release|Mixed Platforms.Build.0 = Release|Any CPU
 		{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}.Release|x86.ActiveCfg = Release|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Debug|x86.ActiveCfg = Debug|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|Any CPU.Build.0 = Release|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+		{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}.Release|x86.ActiveCfg = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE


[23/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
new file mode 100644
index 0000000..49ef7d4
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -0,0 +1,912 @@
+\ufeffusing Lucene.Net.QueryParser.Surround.Query;
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// This class is generated by JavaCC. The only method that clients should need
+    /// to call is <see cref="Parse(string)"/>.
+    ///
+    /// <p>This parser generates queries that make use of position information
+    ///   (Span queries). It provides positional operators (<code>w</code> and
+    ///   <code>n</code>) that accept a numeric distance, as well as boolean
+    ///   operators (<code>and</code>, <code>or</code>, and <code>not</code>),
+    ///   wildcards (<code>*</code> and <code>?</code>), quoting (with
+    ///   <code>"</code>), and boosting (via <code>^</code>).</p>
+    ///
+    ///  <p>The operators (W, N, AND, OR, NOT) can be expressed lower-cased or
+    ///   upper-cased, and the non-unary operators (everything but NOT) support
+    ///   both infix <code>(a AND b AND c)</code> and prefix <code>AND(a, b,
+    ///   c)</code> notation. </p>
+    ///
+    ///  <p>The W and N operators express a positional relationship among their
+    ///  operands.  N is ordered, and W is unordered.  The distance is 1 by
+    ///  default, meaning the operands are adjacent, or may be provided as a
+    ///  prefix from 2-99.  So, for example, 3W(a, b) means that terms a and b
+    ///  must appear within three positions of each other, or in other words, up
+    ///  to two terms may appear between a and b.  </p>
+    /// </summary>
+    public class QueryParser
+    {
        // Minimum number of characters required before a trailing truncator in a prefix query.
        internal readonly int minimumPrefixLength = 3;
        // Minimum number of non-wildcard characters required in a truncated term.
        internal readonly int minimumCharsInTrunc = 3;
        // Message prefixes for ParseExceptions; the offending term text is appended.
        internal readonly string truncationErrorMessage = "Too unrestrictive truncation: ";
        internal readonly string boostErrorMessage = "Cannot handle boost value: ";

        /* CHECKME: These should be the same as for the tokenizer. How? */
        internal readonly char truncator = '*';     // multi-character wildcard
        internal readonly char anyChar = '?';       // single-character wildcard
        internal readonly char quote = '"';
        internal readonly char fieldOperator = ':';
        internal readonly char comma = ','; /* prefix list separator */
        internal readonly char carat = '^'; /* weight operator */
+
+        public static SrndQuery Parse(string query)
+        {
+            QueryParser parser = new QueryParser();
+            return parser.Parse2(query);
+        }
+
        /// <summary>
        /// Creates a parser primed with an empty input stream; real input is
        /// supplied later via <see cref="Parse2(string)"/>, which calls ReInit.
        /// </summary>
        public QueryParser()
            : this(new FastCharStream(new StringReader("")))
        {
        }

        /// <summary>
        /// Parses the given surround query text into a <see cref="SrndQuery"/> tree.
        /// </summary>
        /// <param name="query">The query text to parse.</param>
        /// <returns>The root of the parsed query.</returns>
        /// <exception cref="ParseException">
        /// Thrown on a grammar error, or when the tokenizer fails; the
        /// <see cref="TokenMgrError"/> message is wrapped. NOTE(review): the
        /// original error is not preserved as an inner exception — confirm
        /// whether ParseException offers an inner-exception overload.
        /// </exception>
        public virtual SrndQuery Parse2(string query)
        {
            ReInit(new FastCharStream(new StringReader(query)));
            try
            {
                return TopSrndQuery();
            }
            catch (TokenMgrError tme)
            {
                throw new ParseException(tme.Message);
            }
        }
+
+        protected virtual SrndQuery GetFieldsQuery(
+            SrndQuery q, IEnumerable<string> fieldNames)
+        {
+            /* FIXME: check acceptable subquery: at least one subquery should not be
+             * a fields query.
+             */
+            return new FieldsQuery(q, fieldNames, fieldOperator);
+        }
+
+        protected virtual SrndQuery GetOrQuery(IEnumerable<SrndQuery> queries, bool infix, Token orToken)
+        {
+            return new OrQuery(queries, infix, orToken.image);
+        }
+
+        protected virtual SrndQuery GetAndQuery(IEnumerable<SrndQuery> queries, bool infix, Token andToken)
+        {
+            return new AndQuery(queries, infix, andToken.image);
+        }
+
+        protected virtual SrndQuery GetNotQuery(IEnumerable<SrndQuery> queries, Token notToken)
+        {
+            return new NotQuery(queries, notToken.image);
+        }
+
+        protected static int GetOpDistance(string distanceOp)
+        {
+            /* W, 2W, 3W etc -> 1, 2 3, etc. Same for N, 2N ... */
+            return distanceOp.Length == 1
+              ? 1
+              : int.Parse(distanceOp.Substring(0, distanceOp.Length - 1));
+        }
+
+        protected static void CheckDistanceSubQueries(DistanceQuery distq, string opName)
+        {
+            string m = distq.DistanceSubQueryNotAllowed();
+            if (m != null)
+            {
+                throw new ParseException("Operator " + opName + ": " + m);
+            }
+        }
+
+        protected virtual SrndQuery GetDistanceQuery(
+            IEnumerable<SrndQuery> queries,
+            bool infix,
+            Token dToken,
+            bool ordered)
+        {
+            DistanceQuery dq = new DistanceQuery(queries,
+                                                infix,
+                                                GetOpDistance(dToken.image),
+                                                dToken.image,
+                                                ordered);
+            CheckDistanceSubQueries(dq, dToken.image);
+            return dq;
+        }
+
+        protected virtual SrndQuery GetTermQuery(
+              String term, bool quoted)
+        {
+            return new SrndTermQuery(term, quoted);
+        }
+
+        protected virtual bool AllowedSuffix(String suffixed)
+        {
+            return (suffixed.Length - 1) >= minimumPrefixLength;
+        }
+
+        protected virtual SrndQuery GetPrefixQuery(
+            string prefix, bool quoted)
+        {
+            return new SrndPrefixQuery(prefix, quoted, truncator);
+        }
+
+        protected virtual bool AllowedTruncation(string truncated)
+        {
+            /* At least 3 normal characters needed. */
+            int nrNormalChars = 0;
+            for (int i = 0; i < truncated.Length; i++)
+            {
+                char c = truncated[i];
+                if ((c != truncator) && (c != anyChar))
+                {
+                    nrNormalChars++;
+                }
+            }
+            return nrNormalChars >= minimumCharsInTrunc;
+        }
+
+        protected virtual SrndQuery GetTruncQuery(string truncated)
+        {
+            return new SrndTruncQuery(truncated, truncator, anyChar);
+        }
+
        /// <summary>
        /// Grammar entry point: parses a complete query followed by EOF (token kind 0).
        /// Generated by JavaCC; the <c>{ if (true) return ...; }</c> pattern and the
        /// trailing unreachable throw are generator artifacts kept as emitted.
        /// </summary>
        public SrndQuery TopSrndQuery()
        {
            SrndQuery q;
            q = FieldsQuery();
            Jj_consume_token(0); // consume EOF
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: an optional list of field prefixes followed by an OR query.
        /// When no fields were given, the OR query is returned unwrapped.
        /// </summary>
        public SrndQuery FieldsQuery()
        {
            SrndQuery q;
            IEnumerable<string> fieldNames;
            fieldNames = OptionalFields();
            q = OrQuery();
            { if (true) return (fieldNames == null) ? q : GetFieldsQuery(q, fieldNames); }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: zero or more "TERM COLON" field prefixes. Returns null when
        /// no field prefix is present. The goto label replaces Java's labeled break
        /// in the generated code.
        /// </summary>
        public IEnumerable<string> OptionalFields()
        {
            Token fieldName;
            IList<string> fieldNames = null;

            while (true)
            {
                if (Jj_2_1(2)) // two-token lookahead for "TERM COLON"
                {
                    ;
                }
                else
                {
                    goto label_1;
                }
                // to the colon
                fieldName = Jj_consume_token(RegexpToken.TERM);
                Jj_consume_token(RegexpToken.COLON);
                if (fieldNames == null)
                {
                    fieldNames = new List<string>();
                }
                fieldNames.Add(fieldName.image);
            }
        label_1:
            { if (true) return fieldNames; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: AndQuery ("OR" AndQuery)*. The subquery list is only
        /// allocated once a second operand appears; only the last OR token's image
        /// is kept. (Generated by JavaCC; goto labels replace labeled breaks.)
        /// </summary>
        public SrndQuery OrQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = AndQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OR:
                        ;
                        break;
                    default:
                        jj_la1[0] = jj_gen;
                        goto label_2;
                }
                oprt = Jj_consume_token(RegexpToken.OR);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = AndQuery();
                queries.Add(q);
            }
        label_2:
            { if (true) return (queries == null) ? q : GetOrQuery(queries, true /* infix */, oprt); }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: NotQuery ("AND" NotQuery)*. Same lazy-list pattern as
        /// <see cref="OrQuery"/>.
        /// </summary>
        public SrndQuery AndQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = NotQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.AND:
                        ;
                        break;
                    default:
                        jj_la1[1] = jj_gen;
                        goto label_3;
                }
                oprt = Jj_consume_token(RegexpToken.AND);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = NotQuery();
                queries.Add(q);
            }
        label_3:
            { if (true) return (queries == null) ? q : GetAndQuery(queries, true /* infix */, oprt); }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: NQuery ("NOT" NQuery)*. Same lazy-list pattern as
        /// <see cref="OrQuery"/>.
        /// </summary>
        public SrndQuery NotQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = NQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.NOT:
                        ;
                        break;
                    default:
                        jj_la1[2] = jj_gen;
                        goto label_4;
                }
                oprt = Jj_consume_token(RegexpToken.NOT);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = NQuery();
                queries.Add(q);
            }
        label_4:
            { if (true) return (queries == null) ? q : GetNotQuery(queries, oprt); }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: WQuery (N-token WQuery)*. Builds an unordered distance
        /// query per operator, left associatively. (Generated by JavaCC.)
        /// </summary>
        public SrndQuery NQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries;
            Token dt;
            q = WQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.N:
                        ;
                        break;
                    default:
                        jj_la1[3] = jj_gen;
                        goto label_5;
                }
                dt = Jj_consume_token(RegexpToken.N);
                queries = new List<SrndQuery>();
                queries.Add(q); /* left associative */

                q = WQuery();
                queries.Add(q);
                q = GetDistanceQuery(queries, true /* infix */, dt, false /* not ordered */);
            }
        label_5:
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: PrimaryQuery (W-token PrimaryQuery)*. Builds an ordered
        /// distance query per operator, left associatively. (Generated by JavaCC.)
        /// </summary>
        public SrndQuery WQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries;
            Token wt;
            q = PrimaryQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.W:
                        ;
                        break;
                    default:
                        jj_la1[4] = jj_gen;
                        goto label_6;
                }
                wt = Jj_consume_token(RegexpToken.W);
                queries = new List<SrndQuery>();
                queries.Add(q); /* left associative */

                q = PrimaryQuery();
                queries.Add(q);
                q = GetDistanceQuery(queries, true /* infix */, wt, true /* ordered */);
            }
        label_6:
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: a parenthesized FieldsQuery, a prefix-operator query, or a
        /// simple term — each followed by optional "^ NUMBER" weights.
        /// (Generated by JavaCC.)
        /// </summary>
        /// <exception cref="ParseException">When the next token starts none of the alternatives.</exception>
        public SrndQuery PrimaryQuery()
        {
            /* bracketed weighted query or weighted term */
            SrndQuery q;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.LPAREN:
                    Jj_consume_token(RegexpToken.LPAREN);
                    q = FieldsQuery();
                    Jj_consume_token(RegexpToken.RPAREN);
                    break;
                case RegexpToken.OR:
                case RegexpToken.AND:
                case RegexpToken.W:
                case RegexpToken.N:
                    q = PrefixOperatorQuery();
                    break;
                case RegexpToken.TRUNCQUOTED:
                case RegexpToken.QUOTED:
                case RegexpToken.SUFFIXTERM:
                case RegexpToken.TRUNCTERM:
                case RegexpToken.TERM:
                    q = SimpleTerm();
                    break;
                default:
                    jj_la1[5] = jj_gen;
                    Jj_consume_token(-1); // forces "expected token" bookkeeping
                    throw new ParseException();
            }
            OptionalWeights(q);
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: prefix notation — OR/AND/N/W followed by a parenthesized,
        /// comma-separated list of subqueries, e.g. <c>AND(a, b, c)</c>.
        /// (Generated by JavaCC.)
        /// </summary>
        /// <exception cref="ParseException">When the next token is not a prefix operator.</exception>
        public SrndQuery PrefixOperatorQuery()
        {
            Token oprt;
            IEnumerable<SrndQuery> queries;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.OR:
                    oprt = Jj_consume_token(RegexpToken.OR);
                    /* prefix OR */
                    queries = FieldsQueryList();
                    { if (true) return GetOrQuery(queries, false /* not infix */, oprt); }
                    break;
                case RegexpToken.AND:
                    oprt = Jj_consume_token(RegexpToken.AND);
                    /* prefix AND */
                    queries = FieldsQueryList();
                    { if (true) return GetAndQuery(queries, false /* not infix */, oprt); }
                    break;
                case RegexpToken.N:
                    oprt = Jj_consume_token(RegexpToken.N);
                    /* prefix N */
                    queries = FieldsQueryList();
                    { if (true) return GetDistanceQuery(queries, false /* not infix */, oprt, false /* not ordered */); }
                    break;
                case RegexpToken.W:
                    oprt = Jj_consume_token(RegexpToken.W);
                    /* prefix W */
                    queries = FieldsQueryList();
                    { if (true) return GetDistanceQuery(queries, false  /* not infix */, oprt, true /* ordered */); }
                    break;
                default:
                    jj_la1[6] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Production: "( FieldsQuery (, FieldsQuery)+ )" — the argument list of a
        /// prefix operator; the grammar requires at least two subqueries.
        /// (Generated by JavaCC; goto label replaces a labeled break.)
        /// </summary>
        public IEnumerable<SrndQuery> FieldsQueryList()
        {
            SrndQuery q;
            IList<SrndQuery> queries = new List<SrndQuery>();
            Jj_consume_token(RegexpToken.LPAREN);
            q = FieldsQuery();
            queries.Add(q);

            while (true)
            {
                Jj_consume_token(RegexpToken.COMMA);
                q = FieldsQuery();
                queries.Add(q);
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.COMMA:
                        ;
                        break;
                    default:
                        jj_la1[7] = jj_gen;
                        goto label_7;
                }
            }
        label_7:
            Jj_consume_token(RegexpToken.RPAREN);
            { if (true) return queries; }
            throw new Exception("Missing return statement in function");
        }

        /// <summary>
        /// Production: a single term — plain, quoted, suffix-truncated (trailing *),
        /// wildcarded, or quoted-and-truncated — mapped to the matching query factory.
        /// Quote/truncator characters are stripped via Substring before delegation.
        /// (Generated by JavaCC.)
        /// </summary>
        /// <exception cref="ParseException">
        /// When the term's truncation is too unrestrictive, or the next token is not a term.
        /// </exception>
        public SrndQuery SimpleTerm()
        {
            Token term;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.TERM:
                    term = Jj_consume_token(RegexpToken.TERM);
                    { if (true) return GetTermQuery(term.image, false /* not quoted */); }
                    break;
                case RegexpToken.QUOTED:
                    term = Jj_consume_token(RegexpToken.QUOTED);
                    // TODO: Substring fix
                    // strips the surrounding quote characters
                    { if (true) return GetTermQuery(term.image.Substring(1, (term.image.Length - 1) - 1), true /* quoted */); }
                    break;
                case RegexpToken.SUFFIXTERM:
                    term = Jj_consume_token(RegexpToken.SUFFIXTERM);
                    /* ending in * */
                    if (!AllowedSuffix(term.image))
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    // TODO: Substring fix
                    // strips the trailing truncator
                    { if (true) return GetPrefixQuery(term.image.Substring(0, term.image.Length - 1), false /* not quoted */); }
                    break;
                case RegexpToken.TRUNCTERM:
                    term = Jj_consume_token(RegexpToken.TRUNCTERM);
                    /* with at least one * or ? */
                    if (!AllowedTruncation(term.image))
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    { if (true) return GetTruncQuery(term.image); }
                    break;
                case RegexpToken.TRUNCQUOTED:
                    term = Jj_consume_token(RegexpToken.TRUNCQUOTED);
                    /* eg. "9b-b,m"* */
                    if ((term.image.Length - 3) < minimumPrefixLength)
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    // TODO: Substring fix
                    // strips the quotes and trailing truncator
                    { if (true) return GetPrefixQuery(term.image.Substring(1, (term.image.Length - 2) - 1), true /* quoted */); }
                    break;
                default:
                    jj_la1[8] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            throw new Exception("Missing return statement in function");
        }
+
+        public void OptionalWeights(SrndQuery q)
+        {
+            Token weight = null;
+        
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.CARAT:
+                        ;
+                        break;
+                    default:
+                        jj_la1[9] = jj_gen;
+                        goto label_8;
+                }
+                Jj_consume_token(RegexpToken.CARAT);
+                weight = Jj_consume_token(RegexpToken.NUMBER);
+                float f;
+                try
+                {
+                    // TODO: Test parsing float in various cultures (.NET)
+                    f = float.Parse(weight.image);
+                }
+                catch (Exception floatExc)
+                {
+                    { if (true) throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")"); }
+                }
+                if (f <= 0.0)
+                {
+                    { if (true) throw new ParseException(boostErrorMessage + weight.image); }
+                }
+                q.Weight = (f * q.Weight); /* left associative, fwiw */
+            }
+        label_8: ;
+        }
+
        /// <summary>
        /// Syntactic lookahead for the field-prefix production: returns true when
        /// the next tokens match <see cref="Jj_3_1"/> within <paramref name="xla"/>
        /// tokens. (Generated lookahead machinery; Jj_save caches the result.)
        /// </summary>
        private bool Jj_2_1(int xla)
        {
            jj_la = xla; jj_lastpos = jj_scanpos = token;
            try { return !Jj_3_1(); }
            catch (LookaheadSuccess) { return true; }
            finally { Jj_save(0, xla); }
        }

        /// <summary>
        /// Lookahead body: scans for "TERM COLON"; returns true on mismatch
        /// (JavaCC's inverted convention).
        /// </summary>
        private bool Jj_3_1()
        {
            if (Jj_scan_token(RegexpToken.TERM)) return true;
            if (Jj_scan_token(RegexpToken.COLON)) return true;
            return false;
        }
+
        /** Generated Token Manager. */
        public QueryParserTokenManager token_source;
        /** Current token. */
        public Token token;
        /** Next token. */
        public Token jj_nt;
        // Kind of the next token, or -1 when not yet fetched (see Jj_ntk()).
        private int jj_ntk;
        // Scan position and limit used by Jj_scan_token during lookahead.
        private Token jj_scanpos, jj_lastpos;
        // Remaining lookahead depth.
        private int jj_la;
        // Generation counter, incremented on every consumed token.
        private int jj_gen;
        // Per-choice-point generation snapshots, used for error reporting.
        private readonly int[] jj_la1 = new int[10];
        // Bitmasks of expected token kinds per choice point (parallel to jj_la1).
        private static int[] jj_la1_0;
        static QueryParser()
        {
            Jj_la1_init_0();
        }

        // Initializes the expected-token bitmask table (generated values).
        private static void Jj_la1_init_0()
        {
            jj_la1_0 = new int[] { 0x100, 0x200, 0x400, 0x1000, 0x800, 0x7c3b00, 0x1b00, 0x8000, 0x7c0000, 0x20000, };
        }
        // Bookkeeping for the syntactic lookahead routines (Jj_2_*).
        private readonly JJCalls[] jj_2_rtns = new JJCalls[1];
        private bool jj_rescan = false;
        private int jj_gc = 0;
+
        /// <summary>
        /// Constructor with user supplied CharStream.
        /// </summary>
        /// <param name="stream">The character stream to tokenize.</param>
        public QueryParser(ICharStream stream)
        {
            token_source = new QueryParserTokenManager(stream);
            token = new Token();
            jj_ntk = -1;
            jj_gen = 0;
            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
        }

        /// <summary>
        /// Reinitialise the parser over a new character stream, resetting all
        /// generated bookkeeping state.
        /// </summary>
        /// <param name="stream">The character stream to tokenize.</param>
        public virtual void ReInit(ICharStream stream)
        {
            token_source.ReInit(stream);
            token = new Token();
            jj_ntk = -1;
            jj_gen = 0;
            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
        }

        /// <summary>
        /// Constructor with generated Token Manager.
        /// </summary>
        /// <param name="tm">The token manager supplying tokens.</param>
        public QueryParser(QueryParserTokenManager tm)
        {
            token_source = tm;
            token = new Token();
            jj_ntk = -1;
            jj_gen = 0;
            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
        }

        /// <summary>
        /// Reinitialise the parser with a new token manager, resetting all
        /// generated bookkeeping state.
        /// </summary>
        /// <param name="tm">The token manager supplying tokens.</param>
        public virtual void ReInit(QueryParserTokenManager tm)
        {
            token_source = tm;
            token = new Token();
            jj_ntk = -1;
            jj_gen = 0;
            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
        }
+
        /// <summary>
        /// Consumes and returns the next token, which must be of the given kind;
        /// otherwise restores the previous token and throws via
        /// <see cref="GenerateParseException"/>. Periodically (every 100 tokens)
        /// clears stale cached lookahead results in jj_2_rtns.
        /// (Generated by JavaCC.)
        /// </summary>
        private Token Jj_consume_token(int kind)
        {
            Token oldToken;
            if ((oldToken = token).next != null) token = token.next;
            else token = token.next = token_source.GetNextToken();
            jj_ntk = -1;
            if (token.kind == kind)
            {
                jj_gen++;
                if (++jj_gc > 100)
                {
                    jj_gc = 0;
                    for (int i = 0; i < jj_2_rtns.Length; i++)
                    {
                        JJCalls c = jj_2_rtns[i];
                        while (c != null)
                        {
                            if (c.gen < jj_gen) c.first = null;
                            c = c.next;
                        }
                    }
                }
                return token;
            }
            token = oldToken;
            jj_kind = kind;
            throw GenerateParseException();
        }

        // Control-flow exception thrown by Jj_scan_token when a lookahead
        // succeeds before exhausting its depth (generated pattern).
        private sealed class LookaheadSuccess : Exception { }
        // Shared singleton instance; avoids allocating per lookahead.
        private readonly LookaheadSuccess jj_ls = new LookaheadSuccess();
+
        /// <summary>
        /// Lookahead scan: advances jj_scanpos (fetching tokens from the token
        /// source when needed) and returns true when the scanned token does not
        /// match <paramref name="kind"/>. Throws the shared
        /// <see cref="LookaheadSuccess"/> when the lookahead depth is exhausted
        /// with all tokens matching. During error re-scan (jj_rescan), records the
        /// matched position via Jj_add_error_token. (Generated by JavaCC.)
        /// </summary>
        private bool Jj_scan_token(int kind)
        {
            if (jj_scanpos == jj_lastpos)
            {
                jj_la--;
                if (jj_scanpos.next == null)
                {
                    jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
                }
                else
                {
                    jj_lastpos = jj_scanpos = jj_scanpos.next;
                }
            }
            else
            {
                jj_scanpos = jj_scanpos.next;
            }
            if (jj_rescan)
            {
                int i = 0; Token tok = token;
                while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
                if (tok != null) Jj_add_error_token(kind, i);
            }
            if (jj_scanpos.kind != kind) return true;
            if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
            return false;
        }
+
        /// <summary>
        /// Consumes and returns the next token unconditionally (no kind check),
        /// fetching from the token source when the token chain is exhausted.
        /// </summary>
        /// <returns>The consumed token.</returns>
        public Token GetNextToken()
        {
            if (token.next != null) token = token.next;
            else token = token.next = token_source.GetNextToken();
            jj_ntk = -1;
            jj_gen++;
            return token;
        }

        /// <summary>
        /// Returns the token <paramref name="index"/> positions ahead of the
        /// current token without consuming anything (index 0 = current token);
        /// extends the token chain from the token source as needed.
        /// </summary>
        /// <param name="index">How far ahead to peek.</param>
        /// <returns>The token at the requested offset.</returns>
        public Token GetToken(int index)
        {
            Token t = token;
            for (int i = 0; i < index; i++)
            {
                if (t.next != null) t = t.next;
                else t = t.next = token_source.GetNextToken();
            }
            return t;
        }

        /// <summary>
        /// Returns (and caches in jj_ntk) the kind of the token after the current
        /// one, fetching it from the token source when not yet linked.
        /// (Generated by JavaCC.)
        /// </summary>
        private int Jj_ntk()
        {
            if ((jj_nt = token.next) == null)
                return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
            else
                return (jj_ntk = jj_nt.kind);
        }
+
        // Sequences of expected token kinds collected for error reporting.
        private IList<int[]> jj_expentries = new List<int[]>();
        // Scratch entry currently being built.
        private int[] jj_expentry;
        // Kind passed to the failed Jj_consume_token, or -1 when none pending.
        private int jj_kind = -1;
        // Ring of recently recorded token kinds (capacity 100) and its end index.
        private int[] jj_lasttokens = new int[100];
        private int jj_endpos;
+
+        private void Jj_add_error_token(int kind, int pos)
+        {
+            if (pos >= 100) return;
+            if (pos == jj_endpos + 1)
+            {
+                jj_lasttokens[jj_endpos++] = kind;
+            }
+            else if (jj_endpos != 0)
+            {
+                jj_expentry = new int[jj_endpos];
+                for (int i = 0; i < jj_endpos; i++)
+                {
+                    jj_expentry[i] = jj_lasttokens[i];
+                }
+                foreach (var oldentry in jj_expentries)
+                {
+                    if (oldentry.Length == jj_expentry.Length)
+                    {
+                        for (int i = 0; i < jj_expentry.Length; i++)
+                        {
+                            if (oldentry[i] != jj_expentry[i])
+                            {
+                                continue;
+                            }
+                        }
+                        jj_expentries.Add(jj_expentry);
+                        break;
+                    }
+                }
+                if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
+            }
+        }
+
        /// <summary>
        /// Builds a <see cref="ParseException"/> describing the token kinds that
        /// would have been legal at the point the parse failed.
        /// </summary>
        /// <returns>a ParseException carrying the current token, the expected
        /// token-kind sequences and the token image table</returns>
        public virtual ParseException GenerateParseException()
        {
            jj_expentries.Clear();
            // One flag per token kind (24 kinds in this grammar).
            bool[] la1tokens = new bool[24];
            if (jj_kind >= 0)
            {
                la1tokens[jj_kind] = true;
                jj_kind = -1;
            }
            // Mark every kind that was legal at a LA(1) choice point consulted
            // in the current generation (jj_la1/jj_la1_0 are the generated
            // first-set tables; 10 choice points).
            for (int i = 0; i < 10; i++)
            {
                if (jj_la1[i] == jj_gen)
                {
                    for (int j = 0; j < 32; j++)
                    {
                        if ((jj_la1_0[i] & (1 << j)) != 0)
                        {
                            la1tokens[j] = true;
                        }
                    }
                }
            }
            // Each flagged kind becomes a single-element expected sequence.
            for (int i = 0; i < 24; i++)
            {
                if (la1tokens[i])
                {
                    jj_expentry = new int[1];
                    jj_expentry[0] = i;
                    jj_expentries.Add(jj_expentry);
                }
            }
            // Replay the syntactic-lookahead routines to gather multi-token
            // expected sequences as well.
            jj_endpos = 0;
            Jj_rescan_token();
            Jj_add_error_token(0, 0);
            int[][] exptokseq = new int[jj_expentries.Count][];
            for (int i = 0; i < jj_expentries.Count; i++)
            {
                exptokseq[i] = jj_expentries[i];
            }
            return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
        }
+
        /// <summary>Enable tracing. No-op: this generated parser contains no
        /// tracing actions, so there is nothing to switch on.</summary>
        public void Enable_tracing()
        {
        }
+
        /// <summary>Disable tracing. No-op: this generated parser contains no
        /// tracing actions, so there is nothing to switch off.</summary>
        public void Disable_tracing()
        {
        }
+
+        private void Jj_rescan_token()
+        {
+            jj_rescan = true;
+            for (int i = 0; i < 1; i++)
+            {
+                try
+                {
+                    JJCalls p = jj_2_rtns[i];
+                    do
+                    {
+                        if (p.gen > jj_gen)
+                        {
+                            jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
+                            switch (i)
+                            {
+                                case 0: Jj_3_1(); break;
+                            }
+                        }
+                        p = p.next;
+                    } while (p != null);
+                }
+                catch (LookaheadSuccess ls) { }
+            }
+            jj_rescan = false;
+        }
+
        // Records that lookahead routine 'index' was evaluated at the current
        // token with depth xla, so Jj_rescan_token can replay it later. Walks
        // the JJCalls chain for an expired slot (gen <= jj_gen), appending a
        // fresh node at the end of the chain if every slot is still live.
        private void Jj_save(int index, int xla)
        {
            JJCalls p = jj_2_rtns[index];
            while (p.gen > jj_gen)
            {
                if (p.next == null) { p = p.next = new JJCalls(); break; }
                p = p.next;
            }
            // gen encodes the generation at which this record expires.
            p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
        }
+
        /// <summary>
        /// Linked-list node recording one speculative lookahead evaluation:
        /// written by Jj_save, replayed by Jj_rescan_token.
        /// </summary>
        internal sealed class JJCalls
        {
            internal int gen;     // generation at which this record expires
            internal Token first; // token at which the lookahead started
            internal int arg;     // lookahead depth (xla)
            internal JJCalls next;
        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
new file mode 100644
index 0000000..262f76b
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
@@ -0,0 +1,120 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
    /// <summary>
    /// Token kind ids generated by JavaCC for the Surround query grammar.
    /// Each id indexes the literal image table in QueryParserConstants.
    /// Note: id 7 has no named constant here (TokenImage lists it as
    /// "&lt;token of kind 7&gt;").
    /// </summary>
    public static class RegexpToken
    {
        /// <summary>End of File. </summary>
        public const int EOF = 0;
        /// <summary>RegularExpression Id. </summary>
        public const int _NUM_CHAR = 1;
        /// <summary>RegularExpression Id. </summary>
        public const int _TERM_CHAR = 2;
        /// <summary>RegularExpression Id. </summary>
        public const int _WHITESPACE = 3;
        /// <summary>RegularExpression Id. </summary>
        public const int _STAR = 4;
        /// <summary>RegularExpression Id. </summary>
        public const int _ONE_CHAR = 5;
        /// <summary>RegularExpression Id. </summary>
        public const int _DISTOP_NUM = 6;
        /// <summary>RegularExpression Id. </summary>
        public const int OR = 8;
        /// <summary>RegularExpression Id. </summary>
        public const int AND = 9;
        /// <summary>RegularExpression Id. </summary>
        public const int NOT = 10;
        /// <summary>RegularExpression Id. </summary>
        public const int W = 11;
        /// <summary>RegularExpression Id. </summary>
        public const int N = 12;
        /// <summary>RegularExpression Id. </summary>
        public const int LPAREN = 13;
        /// <summary>RegularExpression Id. </summary>
        public const int RPAREN = 14;
        /// <summary>RegularExpression Id. </summary>
        public const int COMMA = 15;
        /// <summary>RegularExpression Id. </summary>
        public const int COLON = 16;
        /// <summary>RegularExpression Id. </summary>
        public const int CARAT = 17;
        /// <summary>RegularExpression Id. </summary>
        public const int TRUNCQUOTED = 18;
        /// <summary>RegularExpression Id. </summary>
        public const int QUOTED = 19;
        /// <summary>RegularExpression Id. </summary>
        public const int SUFFIXTERM = 20;
        /// <summary>RegularExpression Id. </summary>
        public const int TRUNCTERM = 21;
        /// <summary>RegularExpression Id. </summary>
        public const int TERM = 22;
        /// <summary>RegularExpression Id. </summary>
        public const int NUMBER = 23;
    }
+
    /// <summary>
    /// Lexical state ids generated by JavaCC for the Surround query grammar.
    /// Only states 0 (Boost) and 2 (DEFAULT) are named here; state 1 has no
    /// constant in this table. (Generated code — TODO confirm against the
    /// grammar file if state 1 is ever referenced.)
    /// </summary>
    public static class LexicalToken
    {
        /// <summary>Lexical state.</summary>
        public const int Boost = 0;
        /// <summary>Lexical state.</summary>
        public const int DEFAULT = 2;
    }
+
+    // NOTE: In Java, this was an interface. However, in 
+    // .NET we cannot define constants in an interface.
+    // So, instead we are making it a static class so it 
+    // can be shared between classes with different base classes.
+
+    // public interface QueryParserConstants
+
+    /// <summary> Token literal values and constants.
+    /// Generated by org.javacc.parser.OtherFilesGen#start()
+    /// </summary>
+    public static class QueryParserConstants
+    {
+        /// <summary>Literal token values. </summary>
+        public static string[] TokenImage = new string[] {
+            "<EOF>",
+            "<_NUM_CHAR>",
+            "<_TERM_CHAR>",
+            "<_WHITESPACE>",
+            "\"*\"",
+            "\"?\"",
+            "<_DISTOP_NUM>",
+            "<token of kind 7>",
+            "<OR>",
+            "<AND>",
+            "<NOT>",
+            "<W>",
+            "<N>",
+            "\"(\"",
+            "\")\"",
+            "\",\"",
+            "\":\"",
+            "\"^\"",
+            "<TRUNCQUOTED>",
+            "<QUOTED>",
+            "<SUFFIXTERM>",
+            "<TRUNCTERM>",
+            "<TERM>",
+            "<NUMBER>"
+        };
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
new file mode 100644
index 0000000..ac3d611
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
@@ -0,0 +1,760 @@
+\ufeffusing System;
+using System.Diagnostics.CodeAnalysis;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Token Manager.
+    /// </summary>
+    public class QueryParserTokenManager //: QueryParserConstants
+    {
+        private void InitBlock()
+        {
+            StreamWriter temp_writer;
+            temp_writer = new StreamWriter(Console.OpenStandardOutput(), Console.Out.Encoding);
+            temp_writer.AutoFlush = true;
+            debugStream = temp_writer;
+        }
+
        /// <summary>Debug output. Defaults to standard output (see InitBlock).</summary>
        public StreamWriter debugStream;
        /// <summary>Set debug output. Redirects the token manager's debug
        /// output to the given writer.</summary>
        /// <param name="ds">destination for debug output</param>
        public virtual void SetDebugStream(StreamWriter ds)
        {
            debugStream = ds;
        }
+        private int JjStopStringLiteralDfa_1(int pos, long active0)
+        {
+            switch (pos)
+            {
+                default:
+                    return -1;
+            }
+        }
        // Starts the state-1 NFA from the fallback state computed for the
        // failed literal prefix (always -1 in this grammar), continuing at the
        // next character position.
        private int JjStartNfa_1(int pos, long active0)
        {
            return JjMoveNfa_1(JjStopStringLiteralDfa_1(pos, active0), pos + 1);
        }
+        private int JjStopAtPos(int pos, int kind)
+        {
+            jjmatchedKind = kind;
+            jjmatchedPos = pos;
+            return pos + 1;
+        }
+        private int jjMoveStringLiteralDfa0_1()
+        {
+            switch (curChar)
+            {
+                case (char)40:
+                    return JjStopAtPos(0, 13);
+                case (char)41:
+                    return JjStopAtPos(0, 14);
+                case (char)44:
+                    return JjStopAtPos(0, 15);
+                case (char)58:
+                    return JjStopAtPos(0, 16);
+                case (char)94:
+                    return JjStopAtPos(0, 17);
+                default:
+                    return JjMoveNfa_1(0, 0);
+            }
+        }
        // 256-bit character-class vectors (4 x 64 bits), indexed per 256-char
        // Unicode block by JjCanMove_0: jjbitVec0 has every bit set except
        // bit 0; jjbitVec2 has only the upper 128 bits (chars 0x80-0xFF of a
        // block) set.
        internal static readonly ulong[] jjbitVec0 = {
            0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
        };
        internal static readonly ulong[] jjbitVec2 = {
            0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
        };
+        private int JjMoveNfa_1(int startState, int curPos)
+        {
+            int startsAt = 0;
+            jjnewStateCnt = 38;
+            int i = 1;
+            jjstateSet[0] = startState;
+            int kind = 0x7fffffff;
+            for (; ; )
+            {
+                if (++jjround == 0x7fffffff)
+                    ReInitRounds();
+                if (curChar < 64)
+                {
+                    ulong l = (ulong)(1L << (int)curChar);
+                    do
+                    {
+                        switch (jjstateSet[--i])
+                        {
+                            case 0:
+                                if ((0x7bffe8faffffd9ffL & l) != 0L)
+                                {
+                                    if (kind > 22)
+                                        kind = 22;
+                                    JjCheckNAddStates(0, 4);
+                                }
+                                else if ((0x100002600L & l) != 0L)
+                                {
+                                    if (kind > 7)
+                                        kind = 7;
+                                }
+                                else if (curChar == 34)
+                                    JjCheckNAddStates(5, 7);
+                                if ((0x3fc000000000000L & l) != 0L)
+                                    JjCheckNAddStates(8, 11);
+                                else if (curChar == 49)
+                                    JjCheckNAddTwoStates(20, 21);
+                                break;
+                            case 19:
+                                if ((0x3fc000000000000L & l) != 0L)
+                                    JjCheckNAddStates(8, 11);
+                                break;
+                            case 20:
+                                if ((0x3ff000000000000L & l) != 0L)
+                                    JjCheckNAdd(17);
+                                break;
+                            case 21:
+                                if ((0x3ff000000000000L & l) != 0L)
+                                    JjCheckNAdd(18);
+                                break;
+                            case 22:
+                                if (curChar == 49)
+                                    JjCheckNAddTwoStates(20, 21);
+                                break;
+                            case 23:
+                                if (curChar == 34)
+                                    JjCheckNAddStates(5, 7);
+                                break;
+                            case 24:
+                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
+                                    JjCheckNAddTwoStates(24, 25);
+                                break;
+                            case 25:
+                                if (curChar == 34)
+                                    jjstateSet[jjnewStateCnt++] = 26;
+                                break;
+                            case 26:
+                                if (curChar == 42 && kind > 18)
+                                    kind = 18;
+                                break;
+                            case 27:
+                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
+                                    JjCheckNAddStates(12, 14);
+                                break;
+                            case 29:
+                                if (curChar == 34)
+                                    JjCheckNAddStates(12, 14);
+                                break;
+                            case 30:
+                                if (curChar == 34 && kind > 19)
+                                    kind = 19;
+                                break;
+                            case 31:
+                                if ((0x7bffe8faffffd9ffL & l) == 0L)
+                                    break;
+                                if (kind > 22)
+                                    kind = 22;
+                                JjCheckNAddStates(0, 4);
+                                break;
+                            case 32:
+                                if ((0x7bffe8faffffd9ffL & l) != 0L)
+                                    JjCheckNAddTwoStates(32, 33);
+                                break;
+                            case 33:
+                                if (curChar == 42 && kind > 20)
+                                    kind = 20;
+                                break;
+                            case 34:
+                                if ((0x7bffe8faffffd9ffL & l) != 0L)
+                                    JjCheckNAddTwoStates(34, 35);
+                                break;
+                            case 35:
+                                if ((0x8000040000000000L & l) == (ulong)0L)
+                                    break;
+                                if (kind > 21)
+                                    kind = 21;
+                                JjCheckNAddTwoStates(35, 36);
+                                break;
+                            case 36:
+                                if ((0xfbffecfaffffd9ffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 21)
+                                    kind = 21;
+                                JjCheckNAdd(36);
+                                break;
+                            case 37:
+                                if ((0x7bffe8faffffd9ffL & l) == 0L)
+                                    break;
+                                if (kind > 22)
+                                    kind = 22;
+                                JjCheckNAdd(37);
+                                break;
+                            default: break;
+                        }
+                    } while (i != startsAt);
+                }
+                else if (curChar < 128)
+                {
+                    // NOTE: See the note in the Classic.QueryParserTokenManager.cs file.
+                    // I am working under the assumption 63 is the correct value, since it
+                    // made the tests pass there.
+                    ulong l = (ulong)(1L << (curChar & 63));
+                    //long l = 1L << (curChar & 077);
+                    do
+                    {
+                        switch (jjstateSet[--i])
+                        {
+                            case 0:
+                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
+                                {
+                                    if (kind > 22)
+                                        kind = 22;
+                                    JjCheckNAddStates(0, 4);
+                                }
+                                if ((0x400000004000L & l) != 0L)
+                                {
+                                    if (kind > 12)
+                                        kind = 12;
+                                }
+                                else if ((0x80000000800000L & l) != 0L)
+                                {
+                                    if (kind > 11)
+                                        kind = 11;
+                                }
+                                else if (curChar == 97)
+                                    jjstateSet[jjnewStateCnt++] = 9;
+                                else if (curChar == 65)
+                                    jjstateSet[jjnewStateCnt++] = 6;
+                                else if (curChar == 111)
+                                    jjstateSet[jjnewStateCnt++] = 3;
+                                else if (curChar == 79)
+                                    jjstateSet[jjnewStateCnt++] = 1;
+                                if (curChar == 110)
+                                    jjstateSet[jjnewStateCnt++] = 15;
+                                else if (curChar == 78)
+                                    jjstateSet[jjnewStateCnt++] = 12;
+                                break;
+                            case 1:
+                                if (curChar == 82 && kind > 8)
+                                    kind = 8;
+                                break;
+                            case 2:
+                                if (curChar == 79)
+                                    jjstateSet[jjnewStateCnt++] = 1;
+                                break;
+                            case 3:
+                                if (curChar == 114 && kind > 8)
+                                    kind = 8;
+                                break;
+                            case 4:
+                                if (curChar == 111)
+                                    jjstateSet[jjnewStateCnt++] = 3;
+                                break;
+                            case 5:
+                                if (curChar == 68 && kind > 9)
+                                    kind = 9;
+                                break;
+                            case 6:
+                                if (curChar == 78)
+                                    jjstateSet[jjnewStateCnt++] = 5;
+                                break;
+                            case 7:
+                                if (curChar == 65)
+                                    jjstateSet[jjnewStateCnt++] = 6;
+                                break;
+                            case 8:
+                                if (curChar == 100 && kind > 9)
+                                    kind = 9;
+                                break;
+                            case 9:
+                                if (curChar == 110)
+                                    jjstateSet[jjnewStateCnt++] = 8;
+                                break;
+                            case 10:
+                                if (curChar == 97)
+                                    jjstateSet[jjnewStateCnt++] = 9;
+                                break;
+                            case 11:
+                                if (curChar == 84 && kind > 10)
+                                    kind = 10;
+                                break;
+                            case 12:
+                                if (curChar == 79)
+                                    jjstateSet[jjnewStateCnt++] = 11;
+                                break;
+                            case 13:
+                                if (curChar == 78)
+                                    jjstateSet[jjnewStateCnt++] = 12;
+                                break;
+                            case 14:
+                                if (curChar == 116 && kind > 10)
+                                    kind = 10;
+                                break;
+                            case 15:
+                                if (curChar == 111)
+                                    jjstateSet[jjnewStateCnt++] = 14;
+                                break;
+                            case 16:
+                                if (curChar == 110)
+                                    jjstateSet[jjnewStateCnt++] = 15;
+                                break;
+                            case 17:
+                                if ((0x80000000800000L & l) != 0L && kind > 11)
+                                    kind = 11;
+                                break;
+                            case 18:
+                                if ((0x400000004000L & l) != 0L && kind > 12)
+                                    kind = 12;
+                                break;
+                            case 24:
+                                JjAddStates(15, 16);
+                                break;
+                            case 27:
+                                if ((0xffffffffefffffffL & l) != (ulong)0L)
+                                    JjCheckNAddStates(12, 14);
+                                break;
+                            case 28:
+                                if (curChar == 92)
+                                    jjstateSet[jjnewStateCnt++] = 29;
+                                break;
+                            case 29:
+                                if (curChar == 92)
+                                    JjCheckNAddStates(12, 14);
+                                break;
+                            case 31:
+                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 22)
+                                    kind = 22;
+                                JjCheckNAddStates(0, 4);
+                                break;
+                            case 32:
+                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
+                                    JjCheckNAddTwoStates(32, 33);
+                                break;
+                            case 34:
+                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
+                                    JjCheckNAddTwoStates(34, 35);
+                                break;
+                            case 36:
+                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 21)
+                                    kind = 21;
+                                jjstateSet[jjnewStateCnt++] = 36;
+                                break;
+                            case 37:
+                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 22)
+                                    kind = 22;
+                                JjCheckNAdd(37);
+                                break;
+                            default: break;
+                        }
+                    } while (i != startsAt);
+                }
+                else
+                {
+                    int hiByte = (int)(curChar >> 8);
+                    int i1 = hiByte >> 6;
+                    //long l1 = 1L << (hiByte & 077);
+                    ulong l1 = (ulong)(1L << (hiByte & 63));
+                    int i2 = (curChar & 0xff) >> 6;
+                    //long l2 = 1L << (curChar & 077);
+                    ulong l2 = (ulong)(1L << (curChar & 63));
+                    do
+                    {
+                        switch (jjstateSet[--i])
+                        {
+                            case 0:
+                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
+                                    break;
+                                if (kind > 22)
+                                    kind = 22;
+                                JjCheckNAddStates(0, 4);
+                                break;
+                            case 24:
+                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+                                    JjAddStates(15, 16);
+                                break;
+                            case 27:
+                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+                                    JjAddStates(12, 14);
+                                break;
+                            case 32:
+                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+                                    JjCheckNAddTwoStates(32, 33);
+                                break;
+                            case 34:
+                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+                                    JjCheckNAddTwoStates(34, 35);
+                                break;
+                            case 36:
+                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
+                                    break;
+                                if (kind > 21)
+                                    kind = 21;
+                                jjstateSet[jjnewStateCnt++] = 36;
+                                break;
+                            case 37:
+                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
+                                    break;
+                                if (kind > 22)
+                                    kind = 22;
+                                JjCheckNAdd(37);
+                                break;
+                            default: break;
+                        }
+                    } while (i != startsAt);
+                }
+                if (kind != 0x7fffffff)
+                {
+                    jjmatchedKind = kind;
+                    jjmatchedPos = curPos;
+                    kind = 0x7fffffff;
+                }
+                ++curPos;
+                if ((i = jjnewStateCnt) == (startsAt = 38 - (jjnewStateCnt = startsAt)))
+                    return curPos;
+                try { curChar = input_stream.ReadChar(); }
+                catch (System.IO.IOException e) { return curPos; }
+            }
+        }
+
        // Lexical state 0 has no string-literal tokens, so literal matching
        // goes straight to the state-0 NFA.
        private int JjMoveStringLiteralDfa0_0()
        {
            return JjMoveNfa_0(0, 0);
        }
+        private int JjMoveNfa_0(int startState, int curPos)
+        {
+            int startsAt = 0;
+            jjnewStateCnt = 3;
+            int i = 1;
+            jjstateSet[0] = startState;
+            int kind = 0x7fffffff;
+            for (; ; )
+            {
+                if (++jjround == 0x7fffffff)
+                    ReInitRounds();
+                if (curChar < 64)
+                {
+                    long l = 1L << curChar;
+                    do
+                    {
+                        switch (jjstateSet[--i])
+                        {
+                            case 0:
+                                if ((0x3ff000000000000L & l) == 0L)
+                                    break;
+                                if (kind > 23)
+                                    kind = 23;
+                                JjAddStates(17, 18);
+                                break;
+                            case 1:
+                                if (curChar == 46)
+                                    JjCheckNAdd(2);
+                                break;
+                            case 2:
+                                if ((0x3ff000000000000L & l) == 0L)
+                                    break;
+                                if (kind > 23)
+                                    kind = 23;
+                                JjCheckNAdd(2);
+                                break;
+                            default: break;
+                        }
+                    } while (i != startsAt);
+                }
+                else if (curChar < 128)
+                {
+                    //long l = 1L << (curChar & 077);
+                    ulong l = (ulong)(1L << (curChar & 63)); 
+                    do
+                    {
+                        switch (jjstateSet[--i])
+                        {
+                            default: break;
+                        }
+                    } while (i != startsAt);
+                }
+                else
+                {
+                    int hiByte = (int)(curChar >> 8);
+                    int i1 = hiByte >> 6;
+                    //long l1 = 1L << (hiByte & 077);
+                    ulong l1 = (ulong)(1L << (hiByte & 63));
+                    int i2 = (curChar & 0xff) >> 6;
+                    //long l2 = 1L << (curChar & 077);
+                    ulong l2 = (ulong)(1L << (curChar & 63));
+                    do
+                    {
+                        switch (jjstateSet[--i])
+                        {
+                            default: break;
+                        }
+                    } while (i != startsAt);
+                }
+                if (kind != 0x7fffffff)
+                {
+                    jjmatchedKind = kind;
+                    jjmatchedPos = curPos;
+                    kind = 0x7fffffff;
+                }
+                ++curPos;
+                if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
+                    return curPos;
+                try { curChar = input_stream.ReadChar(); }
+                catch (System.IO.IOException e) { return curPos; }
+            }
+        }
        /// <summary>
        /// Generated NFA transition table: JjAddStates/JjCheckNAddStates copy the
        /// inclusive range [start, end] of these state ids into jjstateSet.
        /// </summary>
        internal static readonly int[] jjnextStates = {
            32, 33, 34, 35, 37, 24, 27, 28, 20, 17, 21, 18, 27, 28, 30, 24, 
            25, 0, 1, 
        };
+        private static bool JjCanMove_0(int hiByte, int i1, int i2, ulong l1, ulong l2)
+        {
+            switch (hiByte)
+            {
+                case 0:
+                    return ((jjbitVec2[i2] & l2) != 0L);
+                default:
+                    if ((jjbitVec0[i1] & l1) != 0L)
+                        return true;
+                    return false;
+            }
+        }
+
        /// <summary>Token literal values; null means the kind has no fixed image.
        /// (The Java original used octal escapes, shown alongside the hex ports.)</summary>
        //public static readonly string[] jjstrLiteralImages = {
        //    "", null, null, null, null, null, null, null, null, null, null, null, null, 
        //    "\50", "\51", "\54", "\72", "\136", null, null, null, null, null, null 
        //};

        public static readonly string[] jjstrLiteralImages = {
            "", null, null, null, null, null, null, null, null, null, null, null, null, 
            "\x0028" /*"\50"*/, "\x0029" /*"\51"*/, "\x002C" /*"\54"*/, "\x003A" /*"\72"*/, "\x005E" /*"\136"*/, null, null, null, null, null, null 
        };

        /// <summary>Lexer state names.</summary>
        public static readonly string[] lexStateNames = {
           "Boost",
           "DEFAULT"
        };

        /// <summary>Lex State array: jjnewLexState[kind] is the lexical state to
        /// switch to after matching that kind, or -1 to keep the current state.</summary>
        public static readonly int[] jjnewLexState = {
           -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1, -1, -1, -1, -1, 1, 
        };
        // Bit set over token kinds: kinds returned to the parser as real tokens.
        internal static readonly long[] jjtoToken = {
           0xffff01L, 
        };
        // Bit set over token kinds: kinds that are skipped rather than returned.
        internal static readonly long[] jjtoSkip = {
           0x80L, 
        };
        // Character source the lexer reads from.
        protected ICharStream input_stream;
        // Per-state round stamps; JjCheckNAdd uses these to avoid adding the same state twice in one round.
        private readonly uint[] jjrounds = new uint[38];
        // Active NFA state set: two halves of 38 entries, swapped each round.
        private readonly int[] jjstateSet = new int[76];
        // Current input character.
        protected internal char curChar;
+
        /// <summary>Constructs a token manager reading from the given stream,
        /// starting in the default ("DEFAULT") lexical state.</summary>
        public QueryParserTokenManager(ICharStream stream)
        {
            InitBlock();
            input_stream = stream;
        }
+
        /// <summary>Constructs a token manager reading from the given stream,
        /// starting in the given lexical state.</summary>
        public QueryParserTokenManager(ICharStream stream, int lexState)
            : this(stream)
        {
            SwitchTo(lexState);
        }
+
        /// <summary>Reinitialises the token manager to read from a new stream,
        /// resetting match state and returning to the default lexical state.</summary>
        public void ReInit(ICharStream stream)
        {
            jjmatchedPos = jjnewStateCnt = 0;
            curLexState = defaultLexState;
            input_stream = stream;
            ReInitRounds();
        }
+        private void ReInitRounds()
+        {
+            int i;
+            jjround = 0x80000001;
+            for (i = 38; i-- > 0; )
+                jjrounds[i] = 0x80000000;
+        }
+
        /// <summary>Reinitialises the token manager for a new stream, then
        /// switches to the given lexical state.</summary>
        public void ReInit(ICharStream stream, int lexState)
        {
            ReInit(stream);
            SwitchTo(lexState);
        }
+
+        /** Switch to specified lex state. */
+        public void SwitchTo(int lexState)
+        {
+            if (lexState >= 2 || lexState < 0)
+                throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
+            else
+                curLexState = lexState;
+        }
+
        // Builds a Token for the most recent match: uses the fixed literal image
        // when one exists for the matched kind, otherwise the raw matched text,
        // and copies the match's begin/end line and column from the input stream.
        protected Token JjFillToken()
        {
            Token t;
            string curTokenImage;
            int beginLine;
            int endLine;
            int beginColumn;
            int endColumn;
            string im = jjstrLiteralImages[jjmatchedKind];
            curTokenImage = (im == null) ? input_stream.Image : im;
            beginLine = input_stream.BeginLine;
            beginColumn = input_stream.BeginColumn;
            endLine = input_stream.EndLine;
            endColumn = input_stream.EndColumn;
            t = Token.NewToken(jjmatchedKind, curTokenImage);

            t.beginLine = beginLine;
            t.endLine = endLine;
            t.beginColumn = beginColumn;
            t.endColumn = endColumn;

            return t;
        }
+
        internal int curLexState = 1;      // current lexical state (index into lexStateNames)
        internal int defaultLexState = 1;  // state restored by ReInit
        internal int jjnewStateCnt;        // number of states in the active half of jjstateSet
        internal uint jjround;             // current round stamp, compared against jjrounds entries
        internal int jjmatchedPos;         // offset of the end of the last match
        internal int jjmatchedKind;        // kind of the last match (0x7fffffff = no match yet)
+
+        /// <summary>Get the next Token.</summary>
+        [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
+        public Token GetNextToken()
+        {
+            Token matchedToken;
+            int curPos = 0;
+
+            for (; ; )
+            {
+                try
+                {
+                    curChar = input_stream.BeginToken();
+                }
+                catch (System.IO.IOException e)
+                {
+                    jjmatchedKind = 0;
+                    matchedToken = JjFillToken();
+                    return matchedToken;
+                }
+
+                switch (curLexState)
+                {
+                    case 0:
+                        jjmatchedKind = 0x7fffffff;
+                        jjmatchedPos = 0;
+                        curPos = JjMoveStringLiteralDfa0_0();
+                        break;
+                    case 1:
+                        jjmatchedKind = 0x7fffffff;
+                        jjmatchedPos = 0;
+                        curPos = jjMoveStringLiteralDfa0_1();
+                        break;
+                }
+                if (jjmatchedKind != 0x7fffffff)
+                {
+                    if (jjmatchedPos + 1 < curPos)
+                        input_stream.Backup(curPos - jjmatchedPos - 1);
+                    if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
+                    {
+                        matchedToken = JjFillToken();
+                        if (jjnewLexState[jjmatchedKind] != -1)
+                            curLexState = jjnewLexState[jjmatchedKind];
+                        return matchedToken;
+                    }
+                    else
+                    {
+                        if (jjnewLexState[jjmatchedKind] != -1)
+                            curLexState = jjnewLexState[jjmatchedKind];
+                        goto EOFLoop;
+                    }
+                }
+                int error_line = input_stream.EndLine;
+                int error_column = input_stream.EndColumn;
+                string error_after = null;
+                bool EOFSeen = false;
+                try { input_stream.ReadChar(); input_stream.Backup(1); }
+                catch (System.IO.IOException e1)
+                {
+                    EOFSeen = true;
+                    error_after = curPos <= 1 ? "" : input_stream.Image;
+                    if (curChar == '\n' || curChar == '\r')
+                    {
+                        error_line++;
+                        error_column = 0;
+                    }
+                    else
+                        error_column++;
+                }
+                if (!EOFSeen)
+                {
+                    input_stream.Backup(1);
+                    error_after = curPos <= 1 ? "" : input_stream.Image;
+                }
+                throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
+            EOFLoop: ;
+            }
+        }
+
+        private void JjCheckNAdd(int state)
+        {
+            if (jjrounds[state] != jjround)
+            {
+                jjstateSet[jjnewStateCnt++] = state;
+                jjrounds[state] = jjround;
+            }
+        }
+        private void JjAddStates(int start, int end)
+        {
+            do
+            {
+                jjstateSet[jjnewStateCnt++] = jjnextStates[start];
+            } while (start++ != end);
+        }
        // Conditionally adds both states to the active set (see JjCheckNAdd).
        private void JjCheckNAddTwoStates(int state1, int state2)
        {
            JjCheckNAdd(state1);
            JjCheckNAdd(state2);
        }
+
+        private void JjCheckNAddStates(int start, int end)
+        {
+            do
+            {
+                JjCheckNAdd(jjnextStates[start]);
+            } while (start++ != end);
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs b/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
new file mode 100644
index 0000000..2d9b83d
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
@@ -0,0 +1,142 @@
+\ufeffusing System;
+
namespace Lucene.Net.QueryParser.Surround.Parser
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// Describes a single token in the input token stream.
    /// </summary>
    [Serializable]
    public class Token
    {
        /// <summary>
        /// An integer identifying the kind of this token. The numbering is
        /// assigned by JavaCCParser and tabulated in the generated
        /// ...Constants file.
        /// </summary>
        public int kind;

        /// <summary>The line number of the first character of this token.</summary>
        public int beginLine;

        /// <summary>The column number of the first character of this token.</summary>
        public int beginColumn;

        /// <summary>The line number of the last character of this token.</summary>
        public int endLine;

        /// <summary>The column number of the last character of this token.</summary>
        public int endColumn;

        /// <summary>The string image of the token.</summary>
        public string image;

        /// <summary>
        /// The next regular (non-special) token from the input stream, or null
        /// if this is the last token, or if the token manager has not read any
        /// tokens beyond this one. (For special tokens this field instead links
        /// to the special token that immediately follows, if any.)
        /// </summary>
        public Token next;

        /// <summary>
        /// Gives access to special tokens that occurred after the previous
        /// regular (non-special) token and before this one; null when there are
        /// none. When several exist, this field refers to the last of them,
        /// which in turn refers to the previous one through its own
        /// specialToken field, and so on back to the first (whose specialToken
        /// field is null).
        /// </summary>
        public Token specialToken;

        /// <summary>
        /// An optional attribute value of the token, often different from the
        /// image. Subclasses that carry a meaningful value override this
        /// property; the default is null.
        /// </summary>
        public virtual object Value
        {
            get { return null; }
        }

        /// <summary>Creates a token with no kind or image.</summary>
        public Token()
        {
        }

        /// <summary>Creates a token of the given kind with a null image.</summary>
        public Token(int kind)
            : this(kind, null)
        {
        }

        /// <summary>Creates a token of the given kind and image.</summary>
        public Token(int kind, string image)
        {
            this.kind = kind;
            this.image = image;
        }

        /// <summary>Returns the token image.</summary>
        public override string ToString()
        {
            return image;
        }

        /// <summary>
        /// Factory for tokens. By default every kind yields a plain Token;
        /// add special cases here to return Token subclasses for particular
        /// kinds, e.g. <c>case MyParserConstants.ID: return new IDToken(ofKind, image);</c>,
        /// and then cast the matchedToken variable appropriately in lexical actions.
        /// </summary>
        public static Token NewToken(int ofKind, string image)
        {
            return new Token(ofKind, image);
        }

        public static Token NewToken(int ofKind)
        {
            return NewToken(ofKind, null);
        }
    }
}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
new file mode 100644
index 0000000..2ccfc58
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
@@ -0,0 +1,170 @@
+\ufeffusing System;
+using System.Text;
+
namespace Lucene.Net.QueryParser.Surround.Parser
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>Token Manager Error.</summary>
    [Serializable]
    public class TokenMgrError : Exception
    {
        // Ordinals for the various reasons an error of this type can be thrown.

        /// <summary>Lexical error occurred.</summary>
        internal const int LEXICAL_ERROR = 0;

        /// <summary>An attempt was made to create a second instance of a static token manager.</summary>
        internal const int STATIC_LEXER_ERROR = 1;

        /// <summary>Tried to change to an invalid lexical state.</summary>
        internal const int INVALID_LEXICAL_STATE = 2;

        /// <summary>Detected (and bailed out of) an infinite loop in the token manager.</summary>
        internal const int LOOP_DETECTED = 3;

        /// <summary>Indicates the reason why the exception is thrown; one of the four ordinals above.</summary>
        internal int errorCode;

        /// <summary>
        /// Replaces unprintable characters by their escaped (or unicode escaped)
        /// equivalents in the given string.
        /// </summary>
        protected internal static string AddEscapes(string str)
        {
            var result = new StringBuilder();
            foreach (char ch in str)
            {
                switch (ch)
                {
                    case (char)0:
                        // NUL characters are dropped entirely.
                        break;
                    case '\b': result.Append("\\b"); break;
                    case '\t': result.Append("\\t"); break;
                    case '\n': result.Append("\\n"); break;
                    case '\f': result.Append("\\f"); break;
                    case '\r': result.Append("\\r"); break;
                    case '\"': result.Append("\\\""); break;
                    case '\'': result.Append("\\\'"); break;
                    case '\\': result.Append("\\\\"); break;
                    default:
                        if (ch < 0x20 || ch > 0x7e)
                        {
                            // Escape as \uXXXX using exactly four lower-case hex digits.
                            string hex = "0000" + Convert.ToString(ch, 16);
                            result.Append("\\u" + hex.Substring(hex.Length - 4));
                        }
                        else
                        {
                            result.Append(ch);
                        }
                        break;
                }
            }
            return result.ToString();
        }

        /// <summary>
        /// Returns a detailed message for the Error when it is thrown by the
        /// token manager to indicate a lexical error.
        /// </summary>
        /// <remarks>You can customize the lexical error message by modifying this method.</remarks>
        /// <param name="EOFSeen">indicates if EOF caused the lexical error</param>
        /// <param name="lexState">lexical state in which this error occurred</param>
        /// <param name="errorLine">line number when the error occurred</param>
        /// <param name="errorColumn">column number when the error occurred</param>
        /// <param name="errorAfter">prefix that was seen before this error occurred</param>
        /// <param name="curChar">the offending character</param>
        /// <returns>Detailed error message</returns>
        protected internal static string LexicalError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar)
        {
            string encountered = EOFSeen
                ? "<EOF> "
                : ("\"" + AddEscapes(Convert.ToString(curChar)) + "\"") + " (" + (int)curChar + "), ";
            return "Lexical error at line " +
                errorLine + ", column " +
                errorColumn + ".  Encountered: " +
                encountered +
                "after : \"" + AddEscapes(errorAfter) + "\"";
        }

        /// <summary>
        /// You can modify the body of this property to customize your error
        /// messages. For example, cases like LOOP_DETECTED and
        /// INVALID_LEXICAL_STATE are not of end-users concern, so you could
        /// return something like "Internal Error : Please file a bug report..."
        /// for such cases in the release version of your parser.
        /// </summary>
        public override string Message
        {
            get { return base.Message; }
        }

        // Constructors of various flavors follow.

        /// <summary>No arg constructor.</summary>
        public TokenMgrError()
        {
        }

        /// <summary>Constructor with message and reason.</summary>
        public TokenMgrError(string message, int reason)
            : base(message)
        {
            errorCode = reason;
        }

        /// <summary>Full constructor; builds the message via <see cref="LexicalError"/>.</summary>
        public TokenMgrError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar, int reason)
            : this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason)
        {
        }
    }
}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
new file mode 100644
index 0000000..aa00e0d
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
@@ -0,0 +1,39 @@
+\ufeffusing Lucene.Net.Search;
+using System.Collections.Generic;
+
namespace Lucene.Net.QueryParser.Surround.Query
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// Factory for conjunctions: combines its subqueries as MUST clauses
    /// of a boolean query.
    /// </summary>
    public class AndQuery : ComposedQuery
    {
        public AndQuery(IEnumerable<SrndQuery> queries, bool inf, string opName)
            : base(queries, inf, opName)
        {
        }

        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
        {
            // Subqueries can be individually boosted.
            var subQueries = MakeLuceneSubQueriesField(fieldName, qf);
            return SrndBooleanQuery.MakeBooleanQuery(subQueries, BooleanClause.Occur.MUST);
        }
    }
}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
new file mode 100644
index 0000000..8992746
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
@@ -0,0 +1,110 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Search.Spans;
+using System.Runtime.CompilerServices;
+
namespace Lucene.Net.QueryParser.Surround.Query
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// Factory for the basic term queries used during rewrite:
    /// <see cref="TermQuery"/> and <see cref="SpanTermQuery"/>.
    /// <para>
    /// Both use IndexReader.termEnum(Term), which causes buffer usage; use this
    /// class to limit that usage when reading terms from an index. Once more
    /// than <see cref="MaxBasicQueries"/> basic queries have been created, a
    /// <see cref="TooManyBasicQueries"/> exception is thrown. The default limit
    /// is 1024, the same as the maximum number of subqueries for a BooleanQuery.
    /// </para>
    /// </summary>
    public class BasicQueryFactory
    {
        // The cap is fixed at construction; marked readonly to make that explicit.
        private readonly int maxBasicQueries;
        private int queriesMade;

        /// <summary>Creates a factory allowing at most <paramref name="maxBasicQueries"/> basic queries.</summary>
        public BasicQueryFactory(int maxBasicQueries)
        {
            this.maxBasicQueries = maxBasicQueries;
            this.queriesMade = 0;
        }

        /// <summary>Creates a factory with the default limit of 1024 basic queries.</summary>
        public BasicQueryFactory()
            : this(1024)
        {
        }

        /// <summary>The number of basic queries created so far.</summary>
        public int NrQueriesMade { get { return queriesMade; } }

        /// <summary>The maximum number of basic queries this factory may create.</summary>
        public int MaxBasicQueries { get { return maxBasicQueries; } }

        public override string ToString()
        {
            return GetType().Name
                + "(maxBasicQueries: " + maxBasicQueries
                + ", queriesMade: " + queriesMade
                + ")";
        }

        // True once the limit has been reached and no further queries may be made.
        private bool AtMax
        {
            get { return queriesMade >= maxBasicQueries; }
        }

        /// <summary>Counts one more query; throws once the limit is hit.</summary>
        /// <exception cref="TooManyBasicQueries">when the limit has been reached</exception>
        [MethodImpl(MethodImplOptions.Synchronized)]
        protected virtual void CheckMax()
        {
            if (AtMax)
                throw new TooManyBasicQueries(MaxBasicQueries);
            queriesMade++;
        }

        /// <summary>Creates a new <see cref="TermQuery"/>, counting it against the limit.</summary>
        public TermQuery NewTermQuery(Term term)
        {
            CheckMax();
            return new TermQuery(term);
        }

        /// <summary>Creates a new <see cref="SpanTermQuery"/>, counting it against the limit.</summary>
        public SpanTermQuery NewSpanTermQuery(Term term)
        {
            CheckMax();
            return new SpanTermQuery(term);
        }

        // NOTE: the hash depends on mutable state (AtMax), consistent with Equals below.
        public override int GetHashCode()
        {
            return GetType().GetHashCode() ^ (AtMax ? 7 : 31 * 32);
        }

        /// <summary>
        /// Two BasicQueryFactory's are equal when they generate
        /// the same types of basic queries, or both cannot generate queries anymore.
        /// </summary>
        public override bool Equals(object obj)
        {
            var other = obj as BasicQueryFactory;
            if (other == null)
                return false;
            return AtMax == other.AtMax;
        }
    }
}


[19/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
new file mode 100644
index 0000000..b879008
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
@@ -0,0 +1,1523 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.QueryParser.Flexible.Standard;
+using Lucene.Net.Search;
+using Lucene.Net.Store;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using Lucene.Net.Util.Automaton;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Globalization;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Lucene.Net.QueryParser.Util
+{
+    /// <summary>
+    /// In .NET the abstract members were moved to AbstractQueryParserTestBase
+    /// because the Visual Studio test runner does not find or run tests in 
+    /// abstract classes.
+    /// </summary>
+    [TestFixture]
+    public class QueryParserTestBase : AbstractQueryParserTestBase
+    {
+        public static Analyzer qpAnalyzer;
+
+        [TestFixtureSetUp]
+        public static void BeforeClass()
+        {
+            qpAnalyzer = new QPTestAnalyzer();
+        }
+
+        [TestFixtureTearDown]
+        public static void AfterClass()
+        {
+            qpAnalyzer = null;
+        }
+
+        public sealed class QPTestFilter : TokenFilter
+        {
+            ICharTermAttribute termAtt;
+            IOffsetAttribute offsetAtt;
+
+            /**
+             * Filter which discards the token 'stop' and which expands the
+             * token 'phrase' into 'phrase1 phrase2'
+             */
+            public QPTestFilter(TokenStream @in)
+                : base(@in)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                offsetAtt = AddAttribute<IOffsetAttribute>();
+            }
+
+            bool inPhrase = false;
+            int savedStart = 0, savedEnd = 0;
+
+            public override sealed bool IncrementToken()
+            {
+                if (inPhrase)
+                {
+                    inPhrase = false;
+                    ClearAttributes();
+                    termAtt.Append("phrase2");
+                    offsetAtt.SetOffset(savedStart, savedEnd);
+                    return true;
+                }
+                else
+                    while (input.IncrementToken())
+                    {
+                        if (termAtt.toString().Equals("phrase"))
+                        {
+                            inPhrase = true;
+                            savedStart = offsetAtt.StartOffset();
+                            savedEnd = offsetAtt.EndOffset();
+                            termAtt.SetEmpty().Append("phrase1");
+                            offsetAtt.SetOffset(savedStart, savedEnd);
+                            return true;
+                        }
+                        else if (!termAtt.toString().equals("stop"))
+                            return true;
+                    }
+                return false;
+            }
+        }
+
+        public sealed class QPTestAnalyzer : Analyzer
+        {
+            /// <summary>
+            /// Filters MockTokenizer with StopFilter.
+            /// </summary>
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
+                return new TokenStreamComponents(tokenizer, new QPTestFilter(tokenizer));
+            }
+        }
+
+        private int originalMaxClauses;
+
+        private string defaultField = "field";
+        public string DefaultField { get { return defaultField; } set { defaultField = value; } }
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            originalMaxClauses = BooleanQuery.MaxClauseCount;
+        }
+
+        // Moved from TestQueryParser
+        public virtual Classic.QueryParser GetParser(Analyzer a)
+        {
+            if (a == null) a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            Classic.QueryParser qp = new Classic.QueryParser(TEST_VERSION_CURRENT, DefaultField, a);
+            qp.DefaultOperator = (QueryParserBase.OR_OPERATOR);
+            return qp;
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
+        {
+            return GetParser(a);
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.DefaultOperator = QueryParserBase.Operator.OR;
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.DefaultOperator = QueryParserBase.Operator.AND;
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
+        {
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.AnalyzeRangeTerms = (value);
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
+        {
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.AutoGeneratePhraseQueries = value;
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
+        {
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.SetDateResolution(field.toString(), value);
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC != null, "Parameter must not be null");
+            Debug.Assert(cqpC is Classic.QueryParser, "Parameter must be instance of QueryParser");
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            return qp.Parse(query);
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override Query GetQuery(string query, Analyzer a)
+        {
+            return GetParser(a).Parse(query);
+        }
+
+
+        // Moved to AbstractQueryParserTestBase
+        public override bool IsQueryParserException(Exception exception)
+        {
+            return exception is ParseException;
+        }
+
+        public Query GetQuery(string query)
+        {
+            return GetQuery(query, (Analyzer)null);
+        }
+
+        public void AssertQueryEquals(string query, Analyzer a, string result)
+        {
+            Query q = GetQuery(query, a);
+            string s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s
+                     + "/, expecting /" + result + "/");
+            }
+        }
+
+        public void AssertQueryEquals(ICommonQueryParserConfiguration cqpC, string field, string query, string result)
+        {
+            Query q = GetQuery(query, cqpC);
+            string s = q.ToString(field);
+            if (!s.Equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s
+                     + "/, expecting /" + result + "/");
+            }
+        }
+
+        public void AssertEscapedQueryEquals(string query, Analyzer a, string result)
+        {
+            string escapedQuery = QueryParserBase.Escape(query);
+            if (!escapedQuery.Equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + escapedQuery
+                    + "/, expecting /" + result + "/");
+            }
+        }
+
+        public void AssertWildcardQueryEquals(string query, bool lowercase, string result, bool allowLeadingWildcard)
+        {
+            ICommonQueryParserConfiguration cqpC = GetParserConfig(null);
+            cqpC.LowercaseExpandedTerms = lowercase;
+            cqpC.AllowLeadingWildcard = allowLeadingWildcard;
+            Query q = GetQuery(query, cqpC);
+            string s = q.ToString("field");
+            if (!s.equals(result))
+            {
+                fail("WildcardQuery /" + query + "/ yielded /" + s
+                     + "/, expecting /" + result + "/");
+            }
+        }
+
+        public void AssertWildcardQueryEquals(string query, bool lowercase, string result)
+        {
+            AssertWildcardQueryEquals(query, lowercase, result, false);
+        }
+
+        public void AssertWildcardQueryEquals(string query, string result)
+        {
+            Query q = GetQuery(query);
+            string s = q.ToString("field");
+            if (!s.Equals(result))
+            {
+                fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
+                    + result + "/");
+            }
+        }
+
+        public Query GetQueryDOA(string query, Analyzer a)
+        {
+            if (a == null)
+                a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            ICommonQueryParserConfiguration qp = GetParserConfig(a);
+            SetDefaultOperatorAND(qp);
+            return GetQuery(query, qp);
+        }
+
+        public void AssertQueryEqualsDOA(string query, Analyzer a, string result)
+        {
+            Query q = GetQueryDOA(query, a);
+            string s = q.ToString("field");
+            if (!s.Equals(result))
+            {
+                fail("Query /" + query + "/ yielded /" + s
+                     + "/, expecting /" + result + "/");
+            }
+        }
+
+        [Test]
+        public void TestCJK()
+        {
+            // Test Ideographic Space - As wide as a CJK character cell (fullwidth)
+            // used google to translate the word "term" to japanese -> \u7528\u8a9e
+            AssertQueryEquals("term\u3000term\u3000term", null, "term\u0020term\u0020term");
+            AssertQueryEquals("\u7528\u8a9e\u3000\u7528\u8a9e\u3000\u7528\u8a9e", null, "\u7528\u8a9e\u0020\u7528\u8a9e\u0020\u7528\u8a9e");
+        }
+
+        protected class SimpleCJKTokenizer : Tokenizer
+        {
+            private ICharTermAttribute termAtt;
+
+            public SimpleCJKTokenizer(System.IO.TextReader input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                int ch = input.Read();
+                if (ch < 0)
+                    return false;
+                ClearAttributes();
+                termAtt.SetEmpty().Append((char)ch);
+                return true;
+            }
+        }
+
+        private class SimpleCJKAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return new TokenStreamComponents(new SimpleCJKTokenizer(reader));
+            }
+        }
+
+        [Test]
+        public void TestCJKTerm()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, GetQuery("\u4e2d\u56fd", analyzer));
+        }
+
+        [Test]
+        public void TestCJKBoostedTerm()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            BooleanQuery expected = new BooleanQuery();
+            expected.Boost = (0.5f);
+            expected.Add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, GetQuery("\u4e2d\u56fd^0.5", analyzer));
+        }
+
+        [Test]
+        public void TestCJKPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Add(new Term("field", "\u4e2d"));
+            expected.Add(new Term("field", "\u56fd"));
+
+            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"", analyzer));
+        }
+
+        [Test]
+        public void TestCJKBoostedPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Boost = (0.5f);
+            expected.Add(new Term("field", "\u4e2d"));
+            expected.Add(new Term("field", "\u56fd"));
+
+            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"^0.5", analyzer));
+        }
+
+        [Test]
+        public void TestCJKSloppyPhrase()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Slop = (3);
+            expected.Add(new Term("field", "\u4e2d"));
+            expected.Add(new Term("field", "\u56fd"));
+
+            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"~3", analyzer));
+        }
+
+        [Test]
+        public void TestAutoGeneratePhraseQueriesOn()
+        {
+            // individual CJK chars as terms
+            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
+
+            PhraseQuery expected = new PhraseQuery();
+            expected.Add(new Term("field", "\u4e2d"));
+            expected.Add(new Term("field", "\u56fd"));
+            ICommonQueryParserConfiguration qp = GetParserConfig(analyzer);
+            SetAutoGeneratePhraseQueries(qp, true);
+            assertEquals(expected, GetQuery("\u4e2d\u56fd", qp));
+        }
+
+        [Test]
+        public void TestSimple()
+        {
+            AssertQueryEquals("term term term", null, "term term term");
+            AssertQueryEquals("türm term term", new MockAnalyzer(Random()), "türm term term");
+            AssertQueryEquals("ümlaut", new MockAnalyzer(Random()), "ümlaut");
+
+            // FIXME: enhance MockAnalyzer to be able to support this
+            // it must no longer extend CharTokenizer
+            //AssertQueryEquals("\"\"", new KeywordAnalyzer(), "");
+            //AssertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
+
+            AssertQueryEquals("a AND b", null, "+a +b");
+            AssertQueryEquals("(a AND b)", null, "+a +b");
+            AssertQueryEquals("c OR (a AND b)", null, "c (+a +b)");
+            AssertQueryEquals("a AND NOT b", null, "+a -b");
+            AssertQueryEquals("a AND -b", null, "+a -b");
+            AssertQueryEquals("a AND !b", null, "+a -b");
+            AssertQueryEquals("a && b", null, "+a +b");
+            //    AssertQueryEquals("a && ! b", null, "+a -b");
+
+            AssertQueryEquals("a OR b", null, "a b");
+            AssertQueryEquals("a || b", null, "a b");
+            AssertQueryEquals("a OR !b", null, "a -b");
+            //    AssertQueryEquals("a OR ! b", null, "a -b");
+            AssertQueryEquals("a OR -b", null, "a -b");
+
+            AssertQueryEquals("+term -term term", null, "+term -term term");
+            AssertQueryEquals("foo:term AND field:anotherTerm", null,
+                              "+foo:term +anotherterm");
+            AssertQueryEquals("term AND \"phrase phrase\"", null,
+                              "+term +\"phrase phrase\"");
+            AssertQueryEquals("\"hello there\"", null, "\"hello there\"");
+            assertTrue(GetQuery("a AND b") is BooleanQuery);
+            assertTrue(GetQuery("hello") is TermQuery);
+            assertTrue(GetQuery("\"hello there\"") is PhraseQuery);
+
+            AssertQueryEquals("germ term^2.0", null, "germ term^2.0");
+            AssertQueryEquals("(term)^2.0", null, "term^2.0");
+            AssertQueryEquals("(germ term)^2.0", null, "(germ term)^2.0");
+            AssertQueryEquals("term^2.0", null, "term^2.0");
+            AssertQueryEquals("term^2", null, "term^2.0");
+            AssertQueryEquals("\"germ term\"^2.0", null, "\"germ term\"^2.0");
+            AssertQueryEquals("\"term germ\"^2", null, "\"term germ\"^2.0");
+
+            AssertQueryEquals("(foo OR bar) AND (baz OR boo)", null,
+                              "+(foo bar) +(baz boo)");
+            AssertQueryEquals("((a OR b) AND NOT c) OR d", null,
+                              "(+(a b) -c) d");
+            AssertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null,
+                              "+(apple \"steve jobs\") -(foo bar baz)");
+            AssertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null,
+                              "+(title:dog title:cat) -author:\"bob dole\"");
+
+        }
+
+        // Moved to AbstractQueryParserTestBase
+        public override void TestDefaultOperator()
+        {
+            throw new NotImplementedException();
+        }
+
+        private class OperatorVsWhitespaceAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
+            }
+        }
+
+        [Test]
+        public void TestOperatorVsWhitespace()
+        { //LUCENE-2566
+            // +,-,! should be directly adjacent to operand (i.e. not separated by whitespace) to be treated as an operator
+            Analyzer a = new OperatorVsWhitespaceAnalyzer();
+            AssertQueryEquals("a - b", a, "a - b");
+            AssertQueryEquals("a + b", a, "a + b");
+            AssertQueryEquals("a ! b", a, "a ! b");
+        }
+
+        [Test]
+        public void TestPunct()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            AssertQueryEquals("a&b", a, "a&b");
+            AssertQueryEquals("a&&b", a, "a&&b");
+            AssertQueryEquals(".NET", a, ".NET");
+        }
+
+        [Test]
+        public void TestSlop()
+        {
+            AssertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
+            AssertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork");
+            AssertQueryEquals("\"term\"~2", null, "term");
+            AssertQueryEquals("\" \"~2 germ", null, "germ");
+            AssertQueryEquals("\"term germ\"~2^2", null, "\"term germ\"~2^2.0");
+        }
+
+        [Test]
+        public void TestNumber()
+        {
+            // The numbers go away because SimpleAnalyzer ignores them
+            AssertQueryEquals("3", null, "");
+            AssertQueryEquals("term 1.0 1 2", null, "term");
+            AssertQueryEquals("term term1 term2", null, "term term term");
+
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true);
+            AssertQueryEquals("3", a, "3");
+            AssertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
+            AssertQueryEquals("term term1 term2", a, "term term1 term2");
+        }
+
+        [Test]
+        public void TestWildcard()
+        {
+            AssertQueryEquals("term*", null, "term*");
+            AssertQueryEquals("term*^2", null, "term*^2.0");
+            AssertQueryEquals("term~", null, "term~2");
+            AssertQueryEquals("term~1", null, "term~1");
+            AssertQueryEquals("term~0.7", null, "term~1");
+            AssertQueryEquals("term~^3", null, "term~2^3.0");
+            AssertQueryEquals("term^3~", null, "term~2^3.0");
+            AssertQueryEquals("term*germ", null, "term*germ");
+            AssertQueryEquals("term*germ^3", null, "term*germ^3.0");
+
+            assertTrue(GetQuery("term*") is PrefixQuery);
+            assertTrue(GetQuery("term*^2") is PrefixQuery);
+            assertTrue(GetQuery("term~") is FuzzyQuery);
+            assertTrue(GetQuery("term~0.7") is FuzzyQuery);
+            FuzzyQuery fq = (FuzzyQuery)GetQuery("term~0.7");
+            assertEquals(1, fq.MaxEdits);
+            assertEquals(FuzzyQuery.DefaultPrefixLength, fq.PrefixLength);
+            fq = (FuzzyQuery)GetQuery("term~");
+            assertEquals(2, fq.MaxEdits);
+            assertEquals(FuzzyQuery.DefaultPrefixLength, fq.PrefixLength);
+
+            AssertParseException("term~1.1"); // value > 1, throws exception
+
+            assertTrue(GetQuery("term*germ") is WildcardQuery);
+
+            /* Tests to see that wild card terms are (or are not) properly
+               * lower-cased with proper parser configuration
+               */
+            // First prefix queries:
+            // by default, convert to lowercase:
+            AssertWildcardQueryEquals("Term*", true, "term*");
+            // explicitly set lowercase:
+            AssertWildcardQueryEquals("term*", true, "term*");
+            AssertWildcardQueryEquals("Term*", true, "term*");
+            AssertWildcardQueryEquals("TERM*", true, "term*");
+            // explicitly disable lowercase conversion:
+            AssertWildcardQueryEquals("term*", false, "term*");
+            AssertWildcardQueryEquals("Term*", false, "Term*");
+            AssertWildcardQueryEquals("TERM*", false, "TERM*");
+            // Then 'full' wildcard queries:
+            // by default, convert to lowercase:
+            AssertWildcardQueryEquals("Te?m", "te?m");
+            // explicitly set lowercase:
+            AssertWildcardQueryEquals("te?m", true, "te?m");
+            AssertWildcardQueryEquals("Te?m", true, "te?m");
+            AssertWildcardQueryEquals("TE?M", true, "te?m");
+            AssertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
+            // explicitly disable lowercase conversion:
+            AssertWildcardQueryEquals("te?m", false, "te?m");
+            AssertWildcardQueryEquals("Te?m", false, "Te?m");
+            AssertWildcardQueryEquals("TE?M", false, "TE?M");
+            AssertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
+            //  Fuzzy queries:
+            AssertWildcardQueryEquals("Term~", "term~2");
+            AssertWildcardQueryEquals("Term~", true, "term~2");
+            AssertWildcardQueryEquals("Term~", false, "Term~2");
+            //  Range queries:
+            AssertWildcardQueryEquals("[A TO C]", "[a TO c]");
+            AssertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
+            AssertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
+            // Test suffix queries: first disallow
+            try
+            {
+                AssertWildcardQueryEquals("*Term", true, "*term");
+            }
+            catch (Exception pe)
+            {
+                // expected exception
+                if (!IsQueryParserException(pe))
+                {
+                    fail();
+                }
+            }
+            try
+            {
+                AssertWildcardQueryEquals("?Term", true, "?term");
+                fail();
+            }
+            catch (Exception pe)
+            {
+                // expected exception
+                if (!IsQueryParserException(pe))
+                {
+                    fail();
+                }
+            }
+            // Test suffix queries: then allow
+            AssertWildcardQueryEquals("*Term", true, "*term", true);
+            AssertWildcardQueryEquals("?Term", true, "?term", true);
+        }
+
+        [Test]
+        public void TestLeadingWildcardType()
+        {
+            ICommonQueryParserConfiguration cqpC = GetParserConfig(null);
+            cqpC.AllowLeadingWildcard = (true);
+            assertEquals(typeof(WildcardQuery), GetQuery("t*erm*", cqpC).GetType());
+            assertEquals(typeof(WildcardQuery), GetQuery("?term*", cqpC).GetType());
+            assertEquals(typeof(WildcardQuery), GetQuery("*term*", cqpC).GetType());
+        }
+
+        [Test]
+        public void TestQPA()
+        {
+            AssertQueryEquals("term term^3.0 term", qpAnalyzer, "term term^3.0 term");
+            AssertQueryEquals("term stop^3.0 term", qpAnalyzer, "term term");
+
+            AssertQueryEquals("term term term", qpAnalyzer, "term term term");
+            AssertQueryEquals("term +stop term", qpAnalyzer, "term term");
+            AssertQueryEquals("term -stop term", qpAnalyzer, "term term");
+
+            AssertQueryEquals("drop AND (stop) AND roll", qpAnalyzer, "+drop +roll");
+            AssertQueryEquals("term +(stop) term", qpAnalyzer, "term term");
+            AssertQueryEquals("term -(stop) term", qpAnalyzer, "term term");
+
+            AssertQueryEquals("drop AND stop AND roll", qpAnalyzer, "+drop +roll");
+            AssertQueryEquals("term phrase term", qpAnalyzer,
+                              "term (phrase1 phrase2) term");
+            AssertQueryEquals("term AND NOT phrase term", qpAnalyzer,
+                              "+term -(phrase1 phrase2) term");
+            AssertQueryEquals("stop^3", qpAnalyzer, "");
+            AssertQueryEquals("stop", qpAnalyzer, "");
+            AssertQueryEquals("(stop)^3", qpAnalyzer, "");
+            AssertQueryEquals("((stop))^3", qpAnalyzer, "");
+            AssertQueryEquals("(stop^3)", qpAnalyzer, "");
+            AssertQueryEquals("((stop)^3)", qpAnalyzer, "");
+            AssertQueryEquals("(stop)", qpAnalyzer, "");
+            AssertQueryEquals("((stop))", qpAnalyzer, "");
+            assertTrue(GetQuery("term term term", qpAnalyzer) is BooleanQuery);
+            assertTrue(GetQuery("term +stop", qpAnalyzer) is TermQuery);
+
+            ICommonQueryParserConfiguration cqpc = GetParserConfig(qpAnalyzer);
+            SetDefaultOperatorAND(cqpc);
+            AssertQueryEquals(cqpc, "field", "term phrase term",
+                "+term +(+phrase1 +phrase2) +term");
+            AssertQueryEquals(cqpc, "field", "phrase",
+                "+phrase1 +phrase2");
+        }
+
+        [Test]
+        public void TestRange()
+        {
+            AssertQueryEquals("[ a TO z]", null, "[a TO z]");
+            AssertQueryEquals("[ a TO z}", null, "[a TO z}");
+            AssertQueryEquals("{ a TO z]", null, "{a TO z]");
+
+            assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)GetQuery("[ a TO z]")).GetRewriteMethod());
+
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true));
+
+            qp.MultiTermRewriteMethod=(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery)GetQuery("[ a TO z]", qp)).GetRewriteMethod());
+
+            // test open ranges
+            AssertQueryEquals("[ a TO * ]", null, "[a TO *]");
+            AssertQueryEquals("[ * TO z ]", null, "[* TO z]");
+            AssertQueryEquals("[ * TO * ]", null, "[* TO *]");
+
+            // mixing exclude and include bounds
+            AssertQueryEquals("{ a TO z ]", null, "{a TO z]");
+            AssertQueryEquals("[ a TO z }", null, "[a TO z}");
+            AssertQueryEquals("{ a TO * ]", null, "{a TO *]");
+            AssertQueryEquals("[ * TO z }", null, "[* TO z}");
+
+            AssertQueryEquals("[ a TO z ]", null, "[a TO z]");
+            AssertQueryEquals("{ a TO z}", null, "{a TO z}");
+            AssertQueryEquals("{ a TO z }", null, "{a TO z}");
+            AssertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
+            AssertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
+            AssertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
+            AssertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
+            AssertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
+
+            AssertQueryEquals("[* TO Z]", null, "[* TO z]");
+            AssertQueryEquals("[A TO *]", null, "[a TO *]");
+            AssertQueryEquals("[* TO *]", null, "[* TO *]");
+        }
+
+        [Test]
+        public void TestRangeWithPhrase()
+        {
+            AssertQueryEquals("[\\* TO \"*\"]", null, "[\\* TO \\*]");
+            AssertQueryEquals("[\"*\" TO *]", null, "[\\* TO *]");
+        }
+
+        private string EscapeDateString(string s)
+        {
+            if (s.IndexOf(" ") > -1)
+            {
+                return "\"" + s + "\"";
+            }
+            else
+            {
+                return s;
+            }
+        }
+
+        /// <summary>for testing DateTools support</summary>
+        private string GetDate(string s, DateTools.Resolution resolution)
+        {
+            // TODO: Is this the correct way to parse the string?
+            DateTime d = DateTime.Parse(s, System.Globalization.CultureInfo.InvariantCulture);
+            return GetDate(d, resolution);
+
+            //// we use the default Locale since LuceneTestCase randomizes it
+            //DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.getDefault());
+            //return GetDate(df.Parse(s), resolution);      
+        }
+
+        /// <summary>for testing DateTools support</summary>
+        private string GetDate(DateTime d, DateTools.Resolution resolution)
+        {
+            return DateTools.DateToString(d, resolution);
+        }
+
+        private string GetLocalizedDate(int year, int month, int day)
+        {
+            // TODO: Is this the right way to get the localized date?
+            DateTime d = new DateTime(year, month, day, 23, 59, 59, 999);
+            return d.ToString();
+
+            //// we use the default Locale/TZ since LuceneTestCase randomizes it
+            //DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.getDefault());
+            //Calendar calendar = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
+            //calendar.clear();
+            //calendar.set(year, month, day);
+            //calendar.set(Calendar.HOUR_OF_DAY, 23);
+            //calendar.set(Calendar.MINUTE, 59);
+            //calendar.set(Calendar.SECOND, 59);
+            //calendar.set(Calendar.MILLISECOND, 999);
+            //return df.format(calendar.getTime());
+        }
+
+        // TODO: Fix this test. The commented body below is the Java original,
+        // which depends on java.util.Calendar/DateFormat; until it is ported
+        // the test fails explicitly rather than passing vacuously.
+        [Test]
+        public void TestDateRange()
+        {
+            Assert.Fail("Test is not implemented");
+
+        //    string startDate = GetLocalizedDate(2002, 1, 1);
+        //    string endDate = GetLocalizedDate(2002, 1, 4);
+        //    // we use the default Locale/TZ since LuceneTestCase randomizes it
+        //    Calendar endDateExpected = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
+        //    endDateExpected.clear();
+        //    endDateExpected.set(2002, 1, 4, 23, 59, 59);
+        //    endDateExpected.set(Calendar.MILLISECOND, 999);
+        //    string defaultField = "default";
+        //    string monthField = "month";
+        //    string hourField = "hour";
+        //    Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+        //    CommonQueryParserConfiguration qp = GetParserConfig(a);
+
+        //    // set a field specific date resolution
+        //    SetDateResolution(qp, monthField, DateTools.Resolution.MONTH);
+
+        //    // set default date resolution to MILLISECOND
+        //    qp.SetDateResolution(DateTools.Resolution.MILLISECOND);
+
+        //    // set second field specific date resolution    
+        //    SetDateResolution(qp, hourField, DateTools.Resolution.HOUR);
+
+        //    // for this field no field specific date resolution has been set,
+        //    // so verify if the default resolution is used
+        //    AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
+        //            endDateExpected.getTime(), DateTools.Resolution.MILLISECOND);
+
+        //    // verify if field specific date resolutions are used for these two fields
+        //    AssertDateRangeQueryEquals(qp, monthField, startDate, endDate,
+        //            endDateExpected.getTime(), DateTools.Resolution.MONTH);
+
+        //    AssertDateRangeQueryEquals(qp, hourField, startDate, endDate,
+        //            endDateExpected.getTime(), DateTools.Resolution.HOUR);
+        }
+
+        /// <summary>
+        /// Asserts that both the inclusive ([a TO b]) and exclusive ({a TO b})
+        /// forms of a date range parse to the expected DateTools-encoded range.
+        /// Note the inclusive form is checked against <paramref name="endDateInclusive"/>
+        /// (end of day) while the exclusive form uses the raw <paramref name="endDate"/> string.
+        /// </summary>
+        public void AssertDateRangeQueryEquals(ICommonQueryParserConfiguration cqpC, string field, string startDate, string endDate,
+            DateTime endDateInclusive, DateTools.Resolution resolution)
+        {
+            AssertQueryEquals(cqpC, field, field + ":[" + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "]",
+                       "[" + GetDate(startDate, resolution) + " TO " + GetDate(endDateInclusive, resolution) + "]");
+            AssertQueryEquals(cqpC, field, field + ":{" + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "}",
+                       "{" + GetDate(startDate, resolution) + " TO " + GetDate(endDate, resolution) + "}");
+        }
+
+        /// <summary>
+        /// Tests backslash escaping of query syntax characters, \uXXXX unicode
+        /// escapes, and several historical escaping bugs (LUCENE-800, LUCENE-1189).
+        /// </summary>
+        [Test]
+        public void TestEscaped()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+
+            /*AssertQueryEquals("\\[brackets", a, "\\[brackets");
+            AssertQueryEquals("\\[brackets", null, "brackets");
+            AssertQueryEquals("\\\\", a, "\\\\");
+            AssertQueryEquals("\\+blah", a, "\\+blah");
+            AssertQueryEquals("\\(blah", a, "\\(blah");
+
+            AssertQueryEquals("\\-blah", a, "\\-blah");
+            AssertQueryEquals("\\!blah", a, "\\!blah");
+            AssertQueryEquals("\\{blah", a, "\\{blah");
+            AssertQueryEquals("\\}blah", a, "\\}blah");
+            AssertQueryEquals("\\:blah", a, "\\:blah");
+            AssertQueryEquals("\\^blah", a, "\\^blah");
+            AssertQueryEquals("\\[blah", a, "\\[blah");
+            AssertQueryEquals("\\]blah", a, "\\]blah");
+            AssertQueryEquals("\\\"blah", a, "\\\"blah");
+            AssertQueryEquals("\\(blah", a, "\\(blah");
+            AssertQueryEquals("\\)blah", a, "\\)blah");
+            AssertQueryEquals("\\~blah", a, "\\~blah");
+            AssertQueryEquals("\\*blah", a, "\\*blah");
+            AssertQueryEquals("\\?blah", a, "\\?blah");
+            //AssertQueryEquals("foo \\&\\& bar", a, "foo \\&\\& bar");
+            //AssertQueryEquals("foo \\|| bar", a, "foo \\|| bar");
+            //AssertQueryEquals("foo \\AND bar", a, "foo \\AND bar");*/
+
+            AssertQueryEquals("\\a", a, "a");
+
+            AssertQueryEquals("a\\-b:c", a, "a-b:c");
+            AssertQueryEquals("a\\+b:c", a, "a+b:c");
+            AssertQueryEquals("a\\:b:c", a, "a:b:c");
+            AssertQueryEquals("a\\\\b:c", a, "a\\b:c");
+
+            AssertQueryEquals("a:b\\-c", a, "a:b-c");
+            AssertQueryEquals("a:b\\+c", a, "a:b+c");
+            AssertQueryEquals("a:b\\:c", a, "a:b:c");
+            AssertQueryEquals("a:b\\\\c", a, "a:b\\c");
+
+            AssertQueryEquals("a:b\\-c*", a, "a:b-c*");
+            AssertQueryEquals("a:b\\+c*", a, "a:b+c*");
+            AssertQueryEquals("a:b\\:c*", a, "a:b:c*");
+
+            AssertQueryEquals("a:b\\\\c*", a, "a:b\\c*");
+
+            AssertQueryEquals("a:b\\-c~", a, "a:b-c~2");
+            AssertQueryEquals("a:b\\+c~", a, "a:b+c~2");
+            AssertQueryEquals("a:b\\:c~", a, "a:b:c~2");
+            AssertQueryEquals("a:b\\\\c~", a, "a:b\\c~2");
+
+            AssertQueryEquals("[ a\\- TO a\\+ ]", null, "[a- TO a+]");
+            AssertQueryEquals("[ a\\: TO a\\~ ]", null, "[a: TO a~]");
+            AssertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]");
+
+            AssertQueryEquals("[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]", a,
+                              "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]");
+
+            AssertQueryEquals("a\\\\\\+b", a, "a\\+b");
+
+            AssertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d");
+            AssertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\"");
+            AssertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\"");
+
+            AssertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt");
+
+            AssertParseException("XY\\"); // there must be a character after the escape char
+
+            // test unicode escaping
+            AssertQueryEquals("a\\u0062c", a, "abc");
+            AssertQueryEquals("XY\\u005a", a, "XYZ");
+            AssertQueryEquals("XY\\u005A", a, "XYZ");
+            AssertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\"");
+
+            AssertParseException("XY\\u005G");  // test non-hex character in escaped unicode sequence
+            AssertParseException("XY\\u005");   // test incomplete escaped unicode sequence
+
+            // Tests bug LUCENE-800
+            AssertQueryEquals("(item:\\\\ item:ABCD\\\\)", a, "item:\\ item:ABCD\\");
+            AssertParseException("(item:\\\\ item:ABCD\\\\))"); // unmatched closing parenthesis 
+            AssertQueryEquals("\\*", a, "*");
+            AssertQueryEquals("\\\\", a, "\\");  // escaped backslash
+
+            AssertParseException("\\"); // a backslash must always be escaped
+
+            // LUCENE-1189
+            AssertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b");
+        }
+
+        /// <summary>
+        /// Verifies that an escaped character followed by '?' keeps the '?'
+        /// as a wildcard while the escaped character stays escaped.
+        /// </summary>
+        [Test]
+        public void TestEscapedVsQuestionMarkAsWildcard()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            AssertQueryEquals("a:b\\-?c", a, "a:b\\-?c");
+            AssertQueryEquals("a:b\\+?c", a, "a:b\\+?c");
+            AssertQueryEquals("a:b\\:?c", a, "a:b\\:?c");
+
+            AssertQueryEquals("a:b\\\\?c", a, "a:b\\\\?c");
+        }
+
+        /// <summary>
+        /// Tests QueryParser escaping of raw strings: each special character in
+        /// the first argument must appear backslash-escaped in the second
+        /// (includes LUCENE-881 for || and &amp;&amp;).
+        /// </summary>
+        [Test]
+        public void TestQueryStringEscaping()
+        {
+            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+
+            AssertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
+            AssertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
+            AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+            AssertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c");
+
+            AssertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c");
+            AssertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c");
+            AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+            AssertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c");
+
+            AssertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*");
+            AssertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*");
+            AssertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*");
+
+            AssertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*");
+
+            AssertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c");
+            AssertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c");
+            AssertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c");
+
+            AssertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c");
+
+            AssertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~");
+            AssertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~");
+            AssertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~");
+            AssertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~");
+
+            AssertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]");
+            AssertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]");
+            AssertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]");
+
+            // LUCENE-881
+            AssertEscapedQueryEquals("|| abc ||", a, "\\|\\| abc \\|\\|");
+            AssertEscapedQueryEquals("&& abc &&", a, "\\&\\& abc \\&\\&");
+        }
+
+        /// <summary>
+        /// Verifies that \n, \r, \r\n and \t between clauses are treated
+        /// as ordinary whitespace (default-operator-AND parsing).
+        /// </summary>
+        [Test]
+        public void TestTabNewlineCarriageReturn()
+        {
+            AssertQueryEqualsDOA("+weltbank +worlbank", null,
+              "+weltbank +worlbank");
+
+            AssertQueryEqualsDOA("+weltbank\n+worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \n+worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \n +worlbank", null,
+              "+weltbank +worlbank");
+
+            AssertQueryEqualsDOA("+weltbank\r+worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \r+worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \r +worlbank", null,
+              "+weltbank +worlbank");
+
+            AssertQueryEqualsDOA("+weltbank\r\n+worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \r\n+worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \r\n +worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \r \n +worlbank", null,
+              "+weltbank +worlbank");
+
+            AssertQueryEqualsDOA("+weltbank\t+worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \t+worlbank", null,
+              "+weltbank +worlbank");
+            AssertQueryEqualsDOA("weltbank \t +worlbank", null,
+              "+weltbank +worlbank");
+        }
+
+        /// <summary>
+        /// Verifies default-operator-AND parsing: unprefixed terms become
+        /// required (+) and explicit -/+ prefixes are preserved.
+        /// </summary>
+        [Test]
+        public void TestSimpleDAO()
+        {
+            AssertQueryEqualsDOA("term term term", null, "+term +term +term");
+            AssertQueryEqualsDOA("term +term term", null, "+term +term +term");
+            AssertQueryEqualsDOA("term term +term", null, "+term +term +term");
+            AssertQueryEqualsDOA("term +term +term", null, "+term +term +term");
+            AssertQueryEqualsDOA("-term term term", null, "-term +term +term");
+        }
+
+        /// <summary>
+        /// Verifies ^boost parsing on terms and phrases, including that a
+        /// boosted stop word still yields a non-null (empty) query with the
+        /// default boost.
+        /// </summary>
+        [Test]
+        public void TestBoost()
+        {
+            CharacterRunAutomaton stopWords = new CharacterRunAutomaton(BasicAutomata.MakeString("on"));
+            Analyzer oneStopAnalyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopWords);
+            ICommonQueryParserConfiguration qp = GetParserConfig(oneStopAnalyzer);
+            Query q = GetQuery("on^1.0", qp);
+            assertNotNull(q);
+            q = GetQuery("\"hello\"^2.0", qp);
+            assertNotNull(q);
+            assertEquals(q.Boost, (float)2.0, (float)0.5);
+            q = GetQuery("hello^2.0", qp);
+            assertNotNull(q);
+            assertEquals(q.Boost, (float)2.0, (float)0.5);
+            q = GetQuery("\"on\"^1.0", qp);
+            assertNotNull(q);
+
+            Analyzer a2 = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+            ICommonQueryParserConfiguration qp2 = GetParserConfig(a2);
+            q = GetQuery("the^3", qp2);
+            // "the" is a stop word so the result is an empty query:
+            assertNotNull(q);
+            assertEquals("", q.toString());
+            assertEquals(1.0f, q.Boost, 0.01f);
+        }
+
+        /// <summary>
+        /// Asserts that parsing <paramref name="queryString"/> with the default
+        /// analyzer throws a parser exception (as determined by
+        /// <see cref="AbstractQueryParserTestBase.IsQueryParserException(Exception)"/>).
+        /// </summary>
+        public void AssertParseException(string queryString)
+        {
+            try
+            {
+                GetQuery(queryString);
+            }
+            catch (Exception expected)
+            {
+                // Only a parser exception counts; any other exception type
+                // falls through to the failure below.
+                if (IsQueryParserException(expected))
+                {
+                    return;
+                }
+            }
+            fail("ParseException expected, not thrown");
+        }
+
+        /// <summary>
+        /// Asserts that parsing <paramref name="queryString"/> with the given
+        /// analyzer throws a parser exception.
+        /// </summary>
+        public void AssertParseException(string queryString, Analyzer a)
+        {
+            try
+            {
+                GetQuery(queryString, a);
+            }
+            catch (Exception expected)
+            {
+                if (IsQueryParserException(expected))
+                {
+                    return;
+                }
+            }
+            fail("ParseException expected, not thrown");
+        }
+
+        /// <summary>Verifies that assorted malformed queries raise parse exceptions.</summary>
+        [Test]
+        public void TestException()
+        {
+            AssertParseException("\"some phrase");
+            AssertParseException("(foo bar");
+            AssertParseException("foo bar))");
+            AssertParseException("field:term:with:colon some more terms");
+            AssertParseException("(sub query)^5.0^2.0 plus more");
+            AssertParseException("secret AND illegal) AND access:confidential");
+        }
+
+        /// <summary>
+        /// Verifies that exceeding BooleanQuery.MaxClauseCount raises a parse
+        /// exception. The original MaxClauseCount is restored in TearDown.
+        /// </summary>
+        [Test]
+        public void TestBooleanQuery()
+        {
+            BooleanQuery.MaxClauseCount = (2);
+            Analyzer purWhitespaceAnalyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            AssertParseException("one two three", purWhitespaceAnalyzer);
+        }
+
+        /// <summary>
+        /// Verifies AND/OR precedence: "A AND B OR C AND D" parses the same
+        /// as "+A +B +C +D" for this parser.
+        /// </summary>
+        [Test]
+        public void TestPrecedence()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            Query query1 = GetQuery("A AND B OR C AND D", qp);
+            Query query2 = GetQuery("+A +B +C +D", qp);
+            assertEquals(query1, query2);
+        }
+
+        // LUCENETODO: convert this from DateField to DateUtil
+        //  public void testLocalDateFormat() throws IOException, ParseException {
+        //    Directory ramDir = newDirectory();
+        //    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
+        //    addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
+        //    addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
+        //    iw.close();
+        //    IndexSearcher is = new IndexSearcher(ramDir, true);
+        //    assertHits(1, "[12/1/2005 TO 12/3/2005]", is);
+        //    assertHits(2, "[12/1/2005 TO 12/4/2005]", is);
+        //    assertHits(1, "[12/3/2005 TO 12/4/2005]", is);
+        //    assertHits(1, "{12/1/2005 TO 12/3/2005}", is);
+        //    assertHits(1, "{12/1/2005 TO 12/4/2005}", is);
+        //    assertHits(0, "{12/3/2005 TO 12/4/2005}", is);
+        //    is.close();
+        //    ramDir.close();
+        //  }
+        //
+        //  private void addDateDoc(String content, int year, int month,
+        //                          int day, int hour, int minute, int second, IndexWriter iw) throws IOException {
+        //    Document d = new Document();
+        //    d.add(newField("f", content, Field.Store.YES, Field.Index.ANALYZED));
+        //    Calendar cal = Calendar.getInstance(Locale.ENGLISH);
+        //    cal.set(year, month - 1, day, hour, minute, second);
+        //    d.add(newField("date", DateField.dateToString(cal.getTime()), Field.Store.YES, Field.Index.NOT_ANALYZED));
+        //    iw.addDocument(d);
+        //  }
+
+        // Moved to AbstractQueryParserTestBase; this override exists only to
+        // satisfy the abstract member and must be overridden by concrete subclasses.
+        public override void TestStarParsing()
+        {
+            throw new NotImplementedException();
+        }
+
+        /// <summary>
+        /// Verifies that an escaped '?' stays literal while an unescaped '?'
+        /// in the same term still produces a WildcardQuery.
+        /// </summary>
+        [Test]
+        public void TestEscapedWildcard()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
+            assertEquals(q, GetQuery("foo\\?ba?r", qp));
+        }
+
+        /// <summary>
+        /// Tests /regex/ syntax: basic parsing, lowercasing of expanded terms,
+        /// boosts, rewrite methods, escaped '/' and '*' inside the pattern,
+        /// mixing with phrases/boolean operators, and multiple regexps.
+        /// </summary>
+        [Test]
+        public void TestRegexps()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            RegexpQuery q = new RegexpQuery(new Term("field", "[a-z][123]"));
+            assertEquals(q, GetQuery("/[a-z][123]/", qp));
+            qp.LowercaseExpandedTerms = (true);
+            assertEquals(q, GetQuery("/[A-Z][123]/", qp));
+            q.Boost = (0.5f);
+            assertEquals(q, GetQuery("/[A-Z][123]/^0.5", qp));
+            qp.MultiTermRewriteMethod=(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            q.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+            assertTrue(GetQuery("/[A-Z][123]/^0.5", qp) is RegexpQuery);
+            assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((RegexpQuery)GetQuery("/[A-Z][123]/^0.5", qp)).GetRewriteMethod());
+            assertEquals(q, GetQuery("/[A-Z][123]/^0.5", qp));
+            qp.MultiTermRewriteMethod=(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT);
+
+            Query escaped = new RegexpQuery(new Term("field", "[a-z]\\/[123]"));
+            assertEquals(escaped, GetQuery("/[a-z]\\/[123]/", qp));
+            Query escaped2 = new RegexpQuery(new Term("field", "[a-z]\\*[123]"));
+            assertEquals(escaped2, GetQuery("/[a-z]\\*[123]/", qp));
+
+            BooleanQuery complex = new BooleanQuery();
+            complex.Add(new RegexpQuery(new Term("field", "[a-z]\\/[123]")), BooleanClause.Occur.MUST);
+            complex.Add(new TermQuery(new Term("path", "/etc/init.d/")), BooleanClause.Occur.MUST);
+            complex.Add(new TermQuery(new Term("field", "/etc/init[.]d/lucene/")), BooleanClause.Occur.SHOULD);
+            assertEquals(complex, GetQuery("/[a-z]\\/[123]/ AND path:\"/etc/init.d/\" OR \"/etc\\/init\\[.\\]d/lucene/\" ", qp));
+
+            Query re = new RegexpQuery(new Term("field", "http.*"));
+            assertEquals(re, GetQuery("field:/http.*/", qp));
+            assertEquals(re, GetQuery("/http.*/", qp));
+
+            re = new RegexpQuery(new Term("field", "http~0.5"));
+            assertEquals(re, GetQuery("field:/http~0.5/", qp));
+            assertEquals(re, GetQuery("/http~0.5/", qp));
+
+            re = new RegexpQuery(new Term("field", "boo"));
+            assertEquals(re, GetQuery("field:/boo/", qp));
+            assertEquals(re, GetQuery("/boo/", qp));
+
+            // Quoted or escaped slashes are NOT regexps: they parse to plain terms.
+            assertEquals(new TermQuery(new Term("field", "/boo/")), GetQuery("\"/boo/\"", qp));
+            assertEquals(new TermQuery(new Term("field", "/boo/")), GetQuery("\\/boo\\/", qp));
+
+            BooleanQuery two = new BooleanQuery();
+            two.Add(new RegexpQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
+            two.Add(new RegexpQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
+            assertEquals(two, GetQuery("field:/foo/ field:/bar/", qp));
+            assertEquals(two, GetQuery("/foo/ /bar/", qp));
+        }
+
+        /// <summary>
+        /// Verifies that stop words ("the", "foo") are dropped from parsed
+        /// boolean queries, collapsing all-stop-word queries to empty ones.
+        /// </summary>
+        [Test]
+        public void TestStopwords()
+        {
+            CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").ToAutomaton());
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopSet));
+            Query result = GetQuery("field:the OR field:foo", qp);
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a BooleanQuery", result is BooleanQuery);
+            assertTrue(((BooleanQuery)result).Clauses.Length + " does not equal: " + 0, ((BooleanQuery)result).Clauses.Length == 0);
+            result = GetQuery("field:woo OR field:the", qp);
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a TermQuery", result is TermQuery);
+            result = GetQuery("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", qp);
+            assertNotNull("result is null and it shouldn't be", result);
+            assertTrue("result is not a BooleanQuery", result is BooleanQuery);
+            if (VERBOSE) Console.WriteLine("Result: " + result);
+            assertTrue(((BooleanQuery)result).Clauses.Length + " does not equal: " + 2, ((BooleanQuery)result).Clauses.Length == 2);
+        }
+
+        /// <summary>
+        /// Verifies that with position increments enabled, stop words removed
+        /// from a phrase leave gaps in the resulting PhraseQuery's positions.
+        /// </summary>
+        [Test]
+        public void TestPositionIncrement()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET));
+            qp.EnablePositionIncrements = (true);
+            String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
+            //               0         2                      5           7  8
+            int[] expectedPositions = { 1, 3, 4, 6, 9 };
+            PhraseQuery pq = (PhraseQuery)GetQuery(qtxt, qp);
+            //System.out.println("Query text: "+qtxt);
+            //System.out.println("Result: "+pq);
+            Term[] t = pq.Terms;
+            int[] pos = pq.Positions;
+            for (int i = 0; i < t.Length; i++)
+            {
+                //System.out.println(i+". "+t[i]+"  pos: "+pos[i]);
+                assertEquals("term " + i + " = " + t[i] + " has wrong term-position!", expectedPositions[i], pos[i]);
+            }
+        }
+
+        /// <summary>Verifies that "*:*" (bare and parenthesized) parses to MatchAllDocsQuery.</summary>
+        [Test]
+        public void TestMatchAllDocs()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            assertEquals(new MatchAllDocsQuery(), GetQuery("*:*", qp));
+            assertEquals(new MatchAllDocsQuery(), GetQuery("(*:*)", qp));
+            BooleanQuery bq = (BooleanQuery)GetQuery("+*:* -*:*", qp);
+            assertTrue(bq.Clauses[0].Query is MatchAllDocsQuery);
+            assertTrue(bq.Clauses[1].Query is MatchAllDocsQuery);
+        }
+
+        /// <summary>
+        /// Parses <paramref name="query"/> against the "date" field (English
+        /// locale) and asserts the number of hits from the given searcher.
+        /// Temporarily swaps DefaultField and restores it afterwards.
+        /// </summary>
+        private void AssertHits(int expected, String query, IndexSearcher @is)
+        {
+            string oldDefaultField = DefaultField;
+            DefaultField = "date";
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
+            qp.Locale = System.Globalization.CultureInfo.GetCultureInfo("en");
+            Query q = GetQuery(query, qp);
+            ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs;
+            assertEquals(expected, hits.Length);
+            DefaultField = oldDefaultField;
+        }
+
+        /// <summary>
+        /// Restores BooleanQuery.MaxClauseCount (mutated by TestBooleanQuery)
+        /// before the base teardown runs.
+        /// </summary>
+        public override void TearDown()
+        {
+            BooleanQuery.MaxClauseCount = originalMaxClauses;
+            base.TearDown();
+        }
+
+        // LUCENE-2002: make sure defaults for StandardAnalyzer's
+        // enableStopPositionIncr & QueryParser's enablePosIncr
+        // "match" — a phrase containing a stop word ("of") must still match
+        // a document indexed with the same analyzer defaults.
+        [Test]
+        public void TestPositionIncrements()
+        {
+            using (Directory dir = NewDirectory())
+            {
+                Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+                using (IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, a)))
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("field", "the wizard of ozzy", Field.Store.NO));
+                    w.AddDocument(doc);
+                    using (IndexReader r = DirectoryReader.Open(w, true))
+                    {
+                        IndexSearcher s = NewSearcher(r);
+
+                        Query q = GetQuery("\"wizard of ozzy\"", a);
+                        assertEquals(1, s.Search(q, 1).TotalHits);
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// adds synonym of "dog" for "dogs". The synonym token is injected at
+        /// the same position (position increment 0) on the call after "dogs"
+        /// is seen.
+        /// </summary>
+        protected class MockSynonymFilter : TokenFilter
+        {
+            ICharTermAttribute termAtt;
+            IPositionIncrementAttribute posIncAtt;
+            // set when the previous token was "dogs"; consumed on the next call
+            bool addSynonym = false;
+
+            public MockSynonymFilter(TokenStream input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                posIncAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                if (addSynonym)
+                { // inject our synonym
+                    ClearAttributes();
+                    termAtt.SetEmpty().Append("dog");
+                    posIncAtt.PositionIncrement = (0);
+                    addSynonym = false;
+                    return true;
+                }
+
+                if (input.IncrementToken())
+                {
+                    addSynonym = termAtt.toString().equals("dogs");
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        }
+
+        /// <summary>
+        /// whitespace+lowercase analyzer WITH synonyms (wraps the tokenizer
+        /// in <see cref="MockSynonymFilter"/>; contrast with Analyzer2).
+        /// </summary>
+        protected class Analyzer1 : Analyzer
+        {
+            public Analyzer1()
+            { }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
+            }
+        }
+
+        /// <summary>
+        /// whitespace+lowercase analyzer without synonyms
+        /// </summary>
+        protected class Analyzer2 : Analyzer
+        {
+            public Analyzer2()
+            { }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true));
+            }
+        }
+
+        // Moved to AbstractQueryParserTestBase; this override exists only to
+        // satisfy the abstract member and must be overridden by concrete subclasses.
+        public override void TestNewFieldQuery()
+        {
+            throw new NotImplementedException();
+        }
+
+        /// <summary>
+        /// Mock collation analyzer: indexes terms as "collated" + term
+        /// </summary>
+        private class MockCollationFilter : TokenFilter
+        {
+            private ICharTermAttribute termAtt;
+
+            public MockCollationFilter(TokenStream input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                if (input.IncrementToken())
+                {
+                    // prefix every token with "collated"
+                    string term = termAtt.toString();
+                    termAtt.SetEmpty().Append("collated").Append(term);
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        }
+
+        /// <summary>Whitespace tokenizer wrapped with <see cref="MockCollationFilter"/>.</summary>
+        private class MockCollationAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(tokenizer, new MockCollationFilter(tokenizer));
+            }
+        }
+
+        /// <summary>
+        /// Verifies that with AnalyzeRangeTerms enabled, range endpoints are
+        /// run through the analyzer (here prefixed with "collated").
+        /// </summary>
+        [Test]
+        public void TestCollatedRange()
+        {
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockCollationAnalyzer());
+            SetAnalyzeRangeTerms(qp, true);
+            Query expected = TermRangeQuery.NewStringRange(DefaultField, "collatedabc", "collateddef", true, true);
+            Query actual = GetQuery("[abc TO def]", qp);
+            assertEquals(expected, actual);
+        }
+
+        /// <summary>Verifies that "~2" on a fuzzy term is parsed as an edit distance.</summary>
+        [Test]
+        public void TestDistanceAsEditsParsing()
+        {
+            FuzzyQuery q = (FuzzyQuery)GetQuery("foobar~2", new MockAnalyzer(Random()));
+            assertEquals(2, q.MaxEdits);
+        }
+
+        /// <summary>
+        /// Verifies PhraseQuery.ToString renders position gaps left by stop
+        /// words as '?' placeholders.
+        /// </summary>
+        [Test]
+        public void TestPhraseQueryToString()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+            ICommonQueryParserConfiguration qp = GetParserConfig(analyzer);
+            qp.EnablePositionIncrements = (true);
+            PhraseQuery q = (PhraseQuery)GetQuery("\"this hi this is a test is\"", qp);
+            assertEquals("field:\"? hi ? ? ? test\"", q.toString());
+        }
+
+        /// <summary>
+        /// Verifies classification of wildcard syntax: a single trailing '*'
+        /// yields a PrefixQuery, while any other wildcard placement yields a
+        /// WildcardQuery (leading wildcards are enabled for this test).
+        /// Temporarily swaps DefaultField and restores it afterwards.
+        /// </summary>
+        [Test]
+        public void TestParseWildcardAndPhraseQueries()
+        {
+            string field = "content";
+            string oldDefaultField = DefaultField;
+            DefaultField = (field);
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random()));
+            qp.AllowLeadingWildcard=(true);
+
+            string[][] prefixQueries = new string[3][] {
+                new string[] {"a*", "ab*", "abc*",},
+                new string[] {"h*", "hi*", "hij*", "\\\\7*"},
+                new string[] {"o*", "op*", "opq*", "\\\\\\\\*"},
+            };
+
+            string[][] wildcardQueries = new string[3][] {
+                new string[] {"*a*", "*ab*", "*abc**", "ab*e*", "*g?", "*f?1", "abc**"},
+                new string[] {"*h*", "*hi*", "*hij**", "hi*k*", "*n?", "*m?1", "hij**"},
+                new string[] {"*o*", "*op*", "*opq**", "op*q*", "*u?", "*t?1", "opq**"},
+            };
+
+            // test queries that must be prefix queries
+            for (int i = 0; i < prefixQueries.Length; i++)
+            {
+                for (int j = 0; j < prefixQueries[i].Length; j++)
+                {
+                    string queryString = prefixQueries[i][j];
+                    Query q = GetQuery(queryString, qp);
+                    assertEquals(typeof(PrefixQuery), q.GetType());
+                }
+            }
+
+            // test queries that must be wildcard queries
+            for (int i = 0; i < wildcardQueries.Length; i++)
+            {
+                for (int j = 0; j < wildcardQueries[i].Length; j++)
+                {
+                    string qtxt = wildcardQueries[i][j];
+                    Query q = GetQuery(qtxt, qp);
+                    assertEquals(typeof(WildcardQuery), q.GetType());
+                }
+            }
+            DefaultField = (oldDefaultField);
+        }
+
+        /// <summary>
+        /// Verifies that a stop word ("stop") inside a phrase leaves a
+        /// position gap (term "2" at position 2) when increments are enabled.
+        /// </summary>
+        [Test]
+        public void TestPhraseQueryPositionIncrements()
+        {
+            CharacterRunAutomaton stopStopList =
+            new CharacterRunAutomaton(new RegExp("[sS][tT][oO][pP]").ToAutomaton());
+
+            // NOTE(review): this first assignment is immediately overwritten
+            // below (mirrors the upstream Java test) — the second config is
+            // the one actually used.
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false, stopStopList));
+
+            qp = GetParserConfig(
+                                 new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false, stopStopList));
+            qp.EnablePositionIncrements=(true);
+
+            PhraseQuery phraseQuery = new PhraseQuery();
+            phraseQuery.Add(new Term("field", "1"));
+            phraseQuery.Add(new Term("field", "2"), 2);
+            assertEquals(phraseQuery, GetQuery("\"1 stop 2\"", qp));
+        }
+
+        /// <summary>
+        /// Verifies round-tripping MatchAllDocsQuery through its ToString form,
+        /// with and without a non-default boost. Temporarily swaps DefaultField.
+        /// </summary>
+        [Test]
+        public void TestMatchAllQueryParsing()
+        {
+            // test simple parsing of MatchAllDocsQuery
+            string oldDefaultField = DefaultField;
+            DefaultField = ("key");
+            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random()));
+            assertEquals(new MatchAllDocsQuery(), GetQuery(new MatchAllDocsQuery().toString(), qp));
+
+            // test parsing with non-default boost
+            MatchAllDocsQuery query = new MatchAllDocsQuery();
+            query.Boost = (2.3f);
+            assertEquals(query, GetQuery(query.toString(), qp));
+            DefaultField = (oldDefaultField);
+        }
+
+        /// <summary>
+        /// Verifies that nested parenthesized AND clauses produce a nested
+        /// BooleanQuery with the inner range clauses grouped together.
+        /// </summary>
+        [Test]
+        public void TestNestedAndClausesFoo()
+        {
+            string query = "(field1:[1 TO *] AND field1:[* TO 2]) AND field2:(z)";
+            BooleanQuery q = new BooleanQuery();
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(TermRangeQuery.NewStringRange("field1", "1", null, true, true), BooleanClause.Occur.MUST);
+            bq.Add(TermRangeQuery.NewStringRange("field1", null, "2", true, true), BooleanClause.Occur.MUST);
+            q.Add(bq, BooleanClause.Occur.MUST);
+            q.Add(new TermQuery(new Term("field2", "z")), BooleanClause.Occur.MUST);
+            assertEquals(q, GetQuery(query, new MockAnalyzer(Random())));
+        }
+    }
+
+
+    /// <summary>
+    /// This class was added in .NET because the Visual Studio test runner
+    /// does not detect tests in abstract classes. Therefore, the abstract members
+    /// of QueryParserTestBase were moved here so the QueryParserTestBase class
+    /// could be made concrete. Concrete parser test fixtures must implement
+    /// these hooks so the shared tests can drive either the Classic or the
+    /// Flexible parser through <see cref="ICommonQueryParserConfiguration"/>.
+    /// </summary>
+    public abstract class AbstractQueryParserTestBase : LuceneTestCase
+    {
+        // Parser-specific tests that could not be implemented generically.
+        public abstract void TestStarParsing();
+
+        public abstract void TestNewFieldQuery();
+
+        public abstract void TestDefaultOperator();
+
+        // Factory: wraps an analyzer in the parser configuration under test.
+        public abstract ICommonQueryParserConfiguration GetParserConfig(Analyzer a);
+
+        public abstract void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC);
+
+        public abstract void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC);
+
+        public abstract void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value);
+
+        public abstract void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value);
+
+        public abstract void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value);
+
+        // Parse hooks used by every shared test above.
+        public abstract Query GetQuery(string query, ICommonQueryParserConfiguration cqpC);
+
+        public abstract Query GetQuery(string query, Analyzer a);
+
+        // Returns true when the exception is the parser's parse-failure type.
+        public abstract bool IsQueryParserException(Exception exception);
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/packages.config
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/packages.config b/src/Lucene.Net.Tests.QueryParser/packages.config
new file mode 100644
index 0000000..139d513
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/packages.config
@@ -0,0 +1,4 @@
+\ufeff<?xml version="1.0" encoding="utf-8"?>
+<packages>
+  <package id="NUnit" version="2.6.3" targetFramework="net451" />
+</packages>
\ No newline at end of file


[46/50] [abbrv] lucenenet git commit: Fixed bug in Classic.QueryParser caused by mistranslation from Java of break

Posted by sy...@apache.org.
Fixed bug in Classic.QueryParser caused by mistranslation from Java of break <label> and continue <label>.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/193c3489
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/193c3489
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/193c3489

Branch: refs/heads/master
Commit: 193c3489dea5daedaf7254668022c57813826c2a
Parents: bd78179
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 3 00:33:18 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sat Sep 3 00:33:18 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.QueryParser/Classic/QueryParser.cs | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/193c3489/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
index f52b4f7..d8f5ca9 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
@@ -794,13 +794,15 @@ namespace Lucene.Net.QueryParser.Classic
                         {
                             if (oldentry[i] != jj_expentry[i])
                             {
-                                continue;
+                                goto jj_entries_loop_continue;
                             }
                         }
                         jj_expentries.Add(jj_expentry);
-                        break;
+                        goto jj_entries_loop_break;
                     }
+                jj_entries_loop_continue: ;
                 }
+            jj_entries_loop_break:
                 if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
             }
         }


[40/50] [abbrv] lucenenet git commit: Removed unnecessary usings.

Posted by sy...@apache.org.
Removed unnecessary usings.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/57069313
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/57069313
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/57069313

Branch: refs/heads/master
Commit: 57069313d04c2333106b7ed09d9e8db6de893b86
Parents: 2efd9b4
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Tue Aug 2 18:48:04 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:31:09 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs     | 5 ++---
 src/Lucene.Net.QueryParser/Classic/ParseException.cs            | 1 -
 src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs           | 1 -
 src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs      | 2 --
 .../Surround/Parser/QueryParserConstants.cs                     | 4 +---
 5 files changed, 3 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/57069313/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs b/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
index 151fe38..6338857 100644
--- a/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
@@ -1,8 +1,7 @@
-using System;
-using System.Collections.Generic;
-using Lucene.Net.Search;
 using Lucene.Net.Analysis;
+using Lucene.Net.Search;
 using Lucene.Net.Util;
+using System.Collections.Generic;
 
 namespace Lucene.Net.QueryParser.Classic
 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/57069313/src/Lucene.Net.QueryParser/Classic/ParseException.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/ParseException.cs b/src/Lucene.Net.QueryParser/Classic/ParseException.cs
index 161fa95..e9cccb4 100644
--- a/src/Lucene.Net.QueryParser/Classic/ParseException.cs
+++ b/src/Lucene.Net.QueryParser/Classic/ParseException.cs
@@ -1,6 +1,5 @@
 using System;
 using System.Text;
-using Lucene.Net.Support;
 
 namespace Lucene.Net.QueryParser.Classic
 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/57069313/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
index 3e80f4a..0b872ae 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
@@ -2,7 +2,6 @@
 using Lucene.Net.Analysis.Tokenattributes;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
-using Lucene.Net.QueryParser.Classic;
 using Lucene.Net.QueryParser.Flexible.Standard;
 using Lucene.Net.Search;
 using Lucene.Net.Support;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/57069313/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
index dcfa193..29df459 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
@@ -1,5 +1,3 @@
-using System;
-
 namespace Lucene.Net.QueryParser.Classic
 {
     /*

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/57069313/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
index 262f76b..d02ab5d 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
@@ -1,6 +1,4 @@
-\ufeffusing System;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
+\ufeffnamespace Lucene.Net.QueryParser.Surround.Parser
 {
     /*
      * Licensed to the Apache Software Foundation (ASF) under one or more


[33/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Ext/ParserExtension.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Ext/ParserExtension.cs b/Lucene.Net.QueryParser/Ext/ParserExtension.cs
deleted file mode 100644
index 27b9212..0000000
--- a/Lucene.Net.QueryParser/Ext/ParserExtension.cs
+++ /dev/null
@@ -1,50 +0,0 @@
-\ufeffusing Lucene.Net.QueryParser.Classic;
-using Lucene.Net.Search;
-
-namespace Lucene.Net.QueryParser.Ext
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// This class represents an extension base class to the Lucene standard
-    /// <see cref="Classic.QueryParser"/>. The
-    /// <see cref="Classic.QueryParser"/> is generated by the JavaCC
-    /// parser generator. Changing or adding functionality or syntax in the standard
-    /// query parser requires changes to the JavaCC source file. To enable extending
-    /// the standard query parser without changing the JavaCC sources and re-generate
-    /// the parser the <see cref="ParserExtension"/> can be customized and plugged into an
-    /// instance of <see cref="ExtendableQueryParser"/>, a direct subclass of
-    /// <see cref="Classic.QueryParser"/>.
-    ///  
-    /// <see cref="Extensions"/>
-    /// <see cref="ExtendableQueryParser"/>
-    /// </summary>
-    public abstract class ParserExtension
-    {
-        /// <summary>
-        /// Processes the given <see cref="ExtensionQuery"/> and returns a corresponding
-        /// <see cref="Query"/> instance. Subclasses must either return a <see cref="Query"/>
-        /// instance or raise a <see cref="ParseException"/>. This method must not return
-        /// <code>null</code>.
-        /// </summary>
-        /// <param name="query">the extension query</param>
-        /// <returns>a new query instance</returns>
-        /// <exception cref="ParseException">if the query can not be parsed.</exception>
-        public abstract Query Parse(ExtensionQuery query);
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Flexible/Standard/CommonQueryParserConfiguration.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Flexible/Standard/CommonQueryParserConfiguration.cs b/Lucene.Net.QueryParser/Flexible/Standard/CommonQueryParserConfiguration.cs
deleted file mode 100644
index ae3809f..0000000
--- a/Lucene.Net.QueryParser/Flexible/Standard/CommonQueryParserConfiguration.cs
+++ /dev/null
@@ -1,106 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Documents;
-using Lucene.Net.Search;
-using System;
-using System.Globalization;
-
-namespace Lucene.Net.QueryParser.Flexible.Standard
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Configuration options common across queryparser implementations.
-    /// </summary>
-    public interface ICommonQueryParserConfiguration
-    {
-        /// <summary>
-        /// Whether terms of multi-term queries (e.g., wildcard,
-        /// prefix, fuzzy and range) should be automatically
-        /// lower-cased or not.  Default is <code>true</code>.
-        /// </summary>
-        bool LowercaseExpandedTerms { get; set; }
-
-        /// <summary>
-        /// Set to <code>true</code> to allow leading wildcard characters.
-        /// <p>
-        /// When set, <code>*</code> or <code>?</code> are allowed as the first
-        /// character of a PrefixQuery and WildcardQuery. Note that this can produce
-        /// very slow queries on big indexes.
-        /// <p>
-        /// Default: false.
-        /// </summary>
-        bool AllowLeadingWildcard { get; set; }
-
-        /// <summary>
-        /// Set to <code>true</code> to enable position increments in result query.
-        /// <p>
-        /// When set, result phrase and multi-phrase queries will be aware of position
-        /// increments. Useful when e.g. a StopFilter increases the position increment
-        /// of the token that follows an omitted token.
-        /// <p>
-        /// Default: false.
-        /// </summary>
-        bool EnablePositionIncrements { get; set; }
-
-        /// <summary>
-        /// By default, it uses 
-        /// {@link MultiTermQuery#CONSTANT_SCORE_AUTO_REWRITE_DEFAULT} when creating a
-        /// prefix, wildcard and range queries. This implementation is generally
-        /// preferable because it a) Runs faster b) Does not have the scarcity of terms
-        /// unduly influence score c) avoids any {@link TooManyListenersException}
-        /// exception. However, if your application really needs to use the
-        /// old-fashioned boolean queries expansion rewriting and the above points are
-        /// not relevant then use this change the rewrite method.
-        /// </summary>
-        MultiTermQuery.RewriteMethod MultiTermRewriteMethod { get; set; }
-
-        /// <summary>
-        /// Get or Set the prefix length for fuzzy queries. Default is 0.
-        /// </summary>
-        int FuzzyPrefixLength { get; set; }
-
-        /// <summary>
-        /// Get or Set locale used by date range parsing.
-        /// </summary>
-        CultureInfo Locale { get; set; }
-
-        /// <summary>
-        /// Gets or Sets the time zone.
-        /// </summary>
-        TimeZoneInfo TimeZone { get; set; }
-
-        /// <summary>
-        /// Gets or Sets the default slop for phrases. If zero, then exact phrase matches are
-        /// required. Default value is zero.
-        /// </summary>
-        int PhraseSlop { get; set; }
-
-        Analyzer Analyzer { get; }
-
-        /// <summary>
-        /// Get the minimal similarity for fuzzy queries.
-        /// </summary>
-        float FuzzyMinSim { get; set; }
-
-        /// <summary>
-        /// Sets the default <see cref="T:DateTools.Resolution"/> used for certain field when
-        /// no <see cref="T:DateTools.Resolution"/> is defined for this field.
-        /// </summary>
-        void SetDateResolution(DateTools.Resolution dateResolution);
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
deleted file mode 100644
index dc38a02..0000000
--- a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
+++ /dev/null
@@ -1,107 +0,0 @@
-\ufeff<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
-  <PropertyGroup>
-    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
-    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
-    <ProjectGuid>{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}</ProjectGuid>
-    <OutputType>Library</OutputType>
-    <AppDesignerFolder>Properties</AppDesignerFolder>
-    <RootNamespace>Lucene.Net.QueryParser</RootNamespace>
-    <AssemblyName>Lucene.Net.QueryParser</AssemblyName>
-    <TargetFrameworkVersion>v4.5.1</TargetFrameworkVersion>
-    <FileAlignment>512</FileAlignment>
-  </PropertyGroup>
-  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
-    <DebugSymbols>true</DebugSymbols>
-    <DebugType>full</DebugType>
-    <Optimize>false</Optimize>
-    <OutputPath>bin\Debug\</OutputPath>
-    <DefineConstants>DEBUG;TRACE</DefineConstants>
-    <ErrorReport>prompt</ErrorReport>
-    <WarningLevel>4</WarningLevel>
-  </PropertyGroup>
-  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
-    <DebugType>pdbonly</DebugType>
-    <Optimize>true</Optimize>
-    <OutputPath>bin\Release\</OutputPath>
-    <DefineConstants>TRACE</DefineConstants>
-    <ErrorReport>prompt</ErrorReport>
-    <WarningLevel>4</WarningLevel>
-  </PropertyGroup>
-  <ItemGroup>
-    <Reference Include="System" />
-    <Reference Include="System.Core" />
-    <Reference Include="System.Xml.Linq" />
-    <Reference Include="System.Data.DataSetExtensions" />
-    <Reference Include="Microsoft.CSharp" />
-    <Reference Include="System.Data" />
-    <Reference Include="System.Xml" />
-  </ItemGroup>
-  <ItemGroup>
-    <Compile Include="Analyzing\AnalyzingQueryParser.cs" />
-    <Compile Include="Classic\CharStream.cs" />
-    <Compile Include="Classic\FastCharStream.cs" />
-    <Compile Include="Classic\MultiFieldQueryParser.cs" />
-    <Compile Include="Classic\ParseException.cs" />
-    <Compile Include="Classic\QueryParser.cs" />
-    <Compile Include="Classic\QueryParserBase.cs" />
-    <Compile Include="Classic\QueryParserConstants.cs" />
-    <Compile Include="Classic\QueryParserTokenManager.cs" />
-    <Compile Include="Classic\Token.cs" />
-    <Compile Include="Classic\TokenMgrError.cs" />
-    <Compile Include="ComplexPhrase\ComplexPhraseQueryParser.cs" />
-    <Compile Include="Ext\ExtendableQueryParser.cs" />
-    <Compile Include="Ext\ExtensionQuery.cs" />
-    <Compile Include="Ext\Extensions.cs" />
-    <Compile Include="Ext\ParserExtension.cs" />
-    <Compile Include="Flexible\Standard\CommonQueryParserConfiguration.cs" />
-    <Compile Include="Properties\AssemblyInfo.cs" />
-    <Compile Include="Simple\SimpleQueryParser.cs" />
-    <Compile Include="Surround\Parser\CharStream.cs" />
-    <Compile Include="Surround\Parser\FastCharStream.cs" />
-    <Compile Include="Surround\Parser\ParseException.cs" />
-    <Compile Include="Surround\Parser\QueryParser.cs" />
-    <Compile Include="Surround\Parser\QueryParserConstants.cs" />
-    <Compile Include="Surround\Parser\QueryParserTokenManager.cs" />
-    <Compile Include="Surround\Parser\Token.cs" />
-    <Compile Include="Surround\Parser\TokenMgrError.cs" />
-    <Compile Include="Surround\Query\AndQuery.cs" />
-    <Compile Include="Surround\Query\BasicQueryFactory.cs" />
-    <Compile Include="Surround\Query\ComposedQuery.cs" />
-    <Compile Include="Surround\Query\DistanceQuery.cs" />
-    <Compile Include="Surround\Query\DistanceRewriteQuery.cs" />
-    <Compile Include="Surround\Query\DistanceSubQuery.cs" />
-    <Compile Include="Surround\Query\FieldsQuery.cs" />
-    <Compile Include="Surround\Query\NotQuery.cs" />
-    <Compile Include="Surround\Query\OrQuery.cs" />
-    <Compile Include="Surround\Query\RewriteQuery.cs" />
-    <Compile Include="Surround\Query\SimpleTerm.cs" />
-    <Compile Include="Surround\Query\SimpleTermRewriteQuery.cs" />
-    <Compile Include="Surround\Query\SpanNearClauseFactory.cs" />
-    <Compile Include="Surround\Query\SrndBooleanQuery.cs" />
-    <Compile Include="Surround\Query\SrndPrefixQuery.cs" />
-    <Compile Include="Surround\Query\SrndQuery.cs" />
-    <Compile Include="Surround\Query\SrndTermQuery.cs" />
-    <Compile Include="Surround\Query\SrndTruncQuery.cs" />
-    <Compile Include="Surround\Query\TooManyBasicQueries.cs" />
-  </ItemGroup>
-  <ItemGroup>
-    <ProjectReference Include="..\src\Lucene.Net.Analysis.Common\Lucene.Net.Analysis.Common.csproj">
-      <Project>{4add0bbc-b900-4715-9526-d871de8eea64}</Project>
-      <Name>Lucene.Net.Analysis.Common</Name>
-    </ProjectReference>
-    <ProjectReference Include="..\src\Lucene.Net.Core\Lucene.Net.csproj">
-      <Project>{5d4ad9be-1ffb-41ab-9943-25737971bf57}</Project>
-      <Name>Lucene.Net</Name>
-    </ProjectReference>
-  </ItemGroup>
-  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
-  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
-       Other similar extension points exist, see Microsoft.Common.targets.
-  <Target Name="BeforeBuild">
-  </Target>
-  <Target Name="AfterBuild">
-  </Target>
-  -->
-</Project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs b/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
deleted file mode 100644
index 023bf34..0000000
--- a/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,39 +0,0 @@
-\ufeffusing System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following 
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("Lucene.Net.QueryParser")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("Lucene.Net.QueryParser")]
-[assembly: AssemblyCopyright("Copyright ©  2016")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Setting ComVisible to false makes the types in this assembly not visible 
-// to COM components.  If you need to access a type in this assembly from 
-// COM, set the ComVisible attribute to true on that type.
-[assembly: ComVisible(false)]
-
-// The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("7c58cf05-89dd-4c02-a948-c28cdaf05247")]
-
-// for testing
-[assembly: InternalsVisibleTo("Lucene.Net.Tests.QueryParser")]
-
-// Version information for an assembly consists of the following four values:
-//
-//      Major Version
-//      Minor Version 
-//      Build Number
-//      Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers 
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
deleted file mode 100644
index 1029c8b..0000000
--- a/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
+++ /dev/null
@@ -1,788 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Index;
-using Lucene.Net.Search;
-using Lucene.Net.Support;
-using Lucene.Net.Util;
-using Lucene.Net.Util.Automaton;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Linq;
-
-namespace Lucene.Net.QueryParser.Simple
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// SimpleQueryParser is used to parse human readable query syntax.
-    /// <p>
-    /// The main idea behind this parser is that a person should be able to type
-    /// whatever they want to represent a query, and this parser will do its best
-    /// to interpret what to search for no matter how poorly composed the request
-    /// may be. Tokens are considered to be any of a term, phrase, or subquery for the
-    /// operations described below.  Whitespace including ' ' '\n' '\r' and '\t'
-    /// and certain operators may be used to delimit tokens ( ) + | " .
-    /// <p>
-    /// Any errors in query syntax will be ignored and the parser will attempt
-    /// to decipher what it can; however, this may mean odd or unexpected results.
-    /// <h4>Query Operators</h4>
-    /// <ul>
-    ///  <li>'{@code +}' specifies {@code AND} operation: <tt>token1+token2</tt>
-    ///  <li>'{@code |}' specifies {@code OR} operation: <tt>token1|token2</tt>
-    ///  <li>'{@code -}' negates a single token: <tt>-token0</tt>
-    ///  <li>'{@code "}' creates phrases of terms: <tt>"term1 term2 ..."</tt>
-    ///  <li>'{@code *}' at the end of terms specifies prefix query: <tt>term*</tt>
-    ///  <li>'{@code ~}N' at the end of terms specifies fuzzy query: <tt>term~1</tt>
-    ///  <li>'{@code ~}N' at the end of phrases specifies near query: <tt>"term1 term2"~5</tt>
-    ///  <li>'{@code (}' and '{@code )}' specifies precedence: <tt>token1 + (token2 | token3)</tt>
-    /// </ul>
-    /// <p>
-    /// The {@link #setDefaultOperator default operator} is {@code OR} if no other operator is specified.
-    /// For example, the following will {@code OR} {@code token1} and {@code token2} together:
-    /// <tt>token1 token2</tt>
-    /// <p>
-    /// Normal operator precedence will be simple order from right to left.
-    /// For example, the following will evaluate {@code token1 OR token2} first,
-    /// then {@code AND} with {@code token3}:
-    /// <blockquote>token1 | token2 + token3</blockquote>
-    /// <h4>Escaping</h4>
-    /// <p>
-    /// An individual term may contain any possible character with certain characters
-    /// requiring escaping using a '{@code \}'.  The following characters will need to be escaped in
-    /// terms and phrases:
-    /// {@code + | " ( ) ' \}
-    /// <p>
-    /// The '{@code -}' operator is a special case.  On individual terms (not phrases) the first
-    /// character of a term that is {@code -} must be escaped; however, any '{@code -}' characters
-    /// beyond the first character do not need to be escaped.
-    /// For example:
-    /// <ul>
-    ///   <li>{@code -term1}   -- Specifies {@code NOT} operation against {@code term1}
-    ///   <li>{@code \-term1}  -- Searches for the term {@code -term1}.
-    ///   <li>{@code term-1}   -- Searches for the term {@code term-1}.
-    ///   <li>{@code term\-1}  -- Searches for the term {@code term-1}.
-    /// </ul>
-    /// <p>
-    /// The '{@code *}' operator is a special case. On individual terms (not phrases) the last
-    /// character of a term that is '{@code *}' must be escaped; however, any '{@code *}' characters
-    /// before the last character do not need to be escaped:
-    /// <ul>
-    ///   <li>{@code term1*}  --  Searches for the prefix {@code term1}
-    ///   <li>{@code term1\*} --  Searches for the term {@code term1*}
-    ///   <li>{@code term*1}  --  Searches for the term {@code term*1}
-    ///   <li>{@code term\*1} --  Searches for the term {@code term*1}
-    /// </ul>
-    /// <p>
-    /// Note that above examples consider the terms before text processing.
-    /// </summary>
-    public class SimpleQueryParser : QueryBuilder
-    {
-        /** Map of fields to query against with their weights */
-        protected readonly IDictionary<string, float> weights;
-
-        // TODO: Make these into a [Flags] enum in .NET??
-        /** flags to the parser (to turn features on/off) */
-        protected readonly int flags;
-
-        /** Enables {@code AND} operator (+) */
-        public static readonly int AND_OPERATOR         = 1<<0;
-        /** Enables {@code NOT} operator (-) */
-        public static readonly int NOT_OPERATOR         = 1<<1;
-        /** Enables {@code OR} operator (|) */
-        public static readonly int OR_OPERATOR          = 1<<2;
-        /** Enables {@code PREFIX} operator (*) */
-        public static readonly int PREFIX_OPERATOR      = 1<<3;
-        /** Enables {@code PHRASE} operator (") */
-        public static readonly int PHRASE_OPERATOR      = 1<<4;
-        /** Enables {@code PRECEDENCE} operators: {@code (} and {@code )} */
-        public static readonly int PRECEDENCE_OPERATORS = 1<<5;
-        /** Enables {@code ESCAPE} operator (\) */
-        public static readonly int ESCAPE_OPERATOR      = 1<<6;
-        /** Enables {@code WHITESPACE} operators: ' ' '\n' '\r' '\t' */
-        public static readonly int WHITESPACE_OPERATOR  = 1<<7;
-        /** Enables {@code FUZZY} operators: (~) on single terms */
-        public static readonly int FUZZY_OPERATOR       = 1<<8;
-        /** Enables {@code NEAR} operators: (~) on phrases */
-        public static readonly int NEAR_OPERATOR        = 1<<9;
-
-        private BooleanClause.Occur defaultOperator = BooleanClause.Occur.SHOULD;
-
-        /// <summary>
-        /// Creates a new parser searching over a single field.
-        /// </summary>
-        /// <param name="analyzer"></param>
-        /// <param name="field"></param>
-        public SimpleQueryParser(Analyzer analyzer, string field)
-            : this(analyzer, new HashMap<string, float>() { { field, 1.0F } })
-        {
-        }
-
-        /// <summary>
-        /// Creates a new parser searching over multiple fields with different weights.
-        /// </summary>
-        /// <param name="analyzer"></param>
-        /// <param name="weights"></param>
-        public SimpleQueryParser(Analyzer analyzer, IDictionary<string, float> weights)
-            : this(analyzer, weights, -1)
-        {
-        }
-
-        /// <summary>
-        /// Creates a new parser with custom flags used to enable/disable certain features.
-        /// </summary>
-        /// <param name="analyzer"></param>
-        /// <param name="weights"></param>
-        /// <param name="flags"></param>
-        public SimpleQueryParser(Analyzer analyzer, IDictionary<string, float> weights, int flags)
-            : base(analyzer)
-        {
-            this.weights = weights;
-            this.flags = flags;
-        }
-
-        /// <summary>
-        /// Parses the query text and returns parsed query (or null if empty)
-        /// </summary>
-        /// <param name="queryText"></param>
-        /// <returns></returns>
-        public Query Parse(string queryText)
-        {
-            char[] data = queryText.ToCharArray();
-            char[] buffer = new char[data.Length];
-
-            State state = new State(data, buffer, 0, data.Length);
-            ParseSubQuery(state);
-            return state.Top;
-        }
-
-        private void ParseSubQuery(State state)
-        {
-            while (state.Index < state.Length)
-            {
-                if (state.Data[state.Index] == '(' && (flags & PRECEDENCE_OPERATORS) != 0)
-                {
-                    // the beginning of a subquery has been found
-                    ConsumeSubQuery(state);
-                }
-                else if (state.Data[state.Index] == ')' && (flags & PRECEDENCE_OPERATORS) != 0)
-                {
-                    // this is an extraneous character so it is ignored
-                    ++state.Index;
-                }
-                else if (state.Data[state.Index] == '"' && (flags & PHRASE_OPERATOR) != 0)
-                {
-                    // the beginning of a phrase has been found
-                    ConsumePhrase(state);
-                }
-                else if (state.Data[state.Index] == '+' && (flags & AND_OPERATOR) != 0)
-                {
-                    // an and operation has been explicitly set
-                    // if an operation has already been set this one is ignored
-                    // if a term (or phrase or subquery) has not been found yet the
-                    // operation is also ignored since there is no previous
-                    // term (or phrase or subquery) to and with
-                    if (!state.CurrentOperationIsSet && state.Top != null)
-                    {
-                        state.CurrentOperation = BooleanClause.Occur.MUST;
-                    }
-
-                    ++state.Index;
-                }
-                else if (state.Data[state.Index] == '|' && (flags & OR_OPERATOR) != 0)
-                {
-                    // an or operation has been explicitly set
-                    // if an operation has already been set this one is ignored
-                    // if a term (or phrase or subquery) has not been found yet the
-                    // operation is also ignored since there is no previous
-                    // term (or phrase or subquery) to or with
-                    if (!state.CurrentOperationIsSet && state.Top != null)
-                    {
-                        state.CurrentOperation = BooleanClause.Occur.SHOULD;
-                    }
-
-                    ++state.Index;
-                }
-                else if (state.Data[state.Index] == '-' && (flags & NOT_OPERATOR) != 0)
-                {
-                    // a not operator has been found, so increase the not count
-                    // two not operators in a row negate each other
-                    ++state.Not;
-                    ++state.Index;
-
-                    // continue so the not operator is not reset
-                    // before the next character is determined
-                    continue;
-                }
-                else if ((state.Data[state.Index] == ' '
-                  || state.Data[state.Index] == '\t'
-                  || state.Data[state.Index] == '\n'
-                  || state.Data[state.Index] == '\r') && (flags & WHITESPACE_OPERATOR) != 0)
-                {
-                    // ignore any whitespace found as it may have already been
-                    // used as a delimiter across a term (or phrase or subquery)
-                    // or is simply extraneous
-                    ++state.Index;
-                }
-                else
-                {
-                    // the beginning of a token has been found
-                    ConsumeToken(state);
-                }
-
-                // reset the not operator as even whitespace is not allowed when
-                // specifying the not operation for a term (or phrase or subquery)
-                state.Not = 0;
-            }
-        }
-
-        private void ConsumeSubQuery(State state)
-        {
-            Debug.Assert((flags & PRECEDENCE_OPERATORS) != 0);
-            int start = ++state.Index;
-            int precedence = 1;
-            bool escaped = false;
-
-            while (state.Index < state.Length)
-            {
-                if (!escaped)
-                {
-                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
-                    {
-                        // an escape character has been found so
-                        // whatever character is next will become
-                        // part of the subquery unless the escape
-                        // character is the last one in the data
-                        escaped = true;
-                        ++state.Index;
-
-                        continue;
-                    }
-                    else if (state.Data[state.Index] == '(')
-                    {
-                        // increase the precedence as there is a
-                        // subquery in the current subquery
-                        ++precedence;
-                    }
-                    else if (state.Data[state.Index] == ')')
-                    {
-                        --precedence;
-
-                        if (precedence == 0)
-                        {
-                            // this should be the end of the subquery
-                            // all characters found will be used for
-                            // creating the subquery
-                            break;
-                        }
-                    }
-                }
-
-                escaped = false;
-                ++state.Index;
-            }
-
-            if (state.Index == state.Length)
-            {
-                // a closing parenthesis was never found so the opening
-                // parenthesis is considered extraneous and will be ignored
-                state.Index = start;
-            }
-            else if (state.Index == start)
-            {
-                // a closing parenthesis was found immediately after the opening
-                // parenthesis so the current operation is reset since it would
-                // have been applied to this subquery
-                state.CurrentOperationIsSet = false;
-
-                ++state.Index;
-            }
-            else
-            {
-                // a complete subquery has been found and is recursively parsed by
-                // starting over with a new state object
-                State subState = new State(state.Data, state.Buffer, start, state.Index);
-                ParseSubQuery(subState);
-                BuildQueryTree(state, subState.Top);
-
-                ++state.Index;
-            }
-        }
-
-        private void ConsumePhrase(State state)
-        {
-            Debug.Assert((flags & PHRASE_OPERATOR) != 0);
-            int start = ++state.Index;
-            int copied = 0;
-            bool escaped = false;
-            bool hasSlop = false;
-
-            while (state.Index < state.Length)
-            {
-                if (!escaped)
-                {
-                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
-                    {
-                        // an escape character has been found so
-                        // whatever character is next will become
-                        // part of the phrase unless the escape
-                        // character is the last one in the data
-                        escaped = true;
-                        ++state.Index;
-
-                        continue;
-                    }
-                    else if (state.Data[state.Index] == '"')
-                    {
-                        // if there are still characters after the closing ", check for a
-                        // tilde
-                        if (state.Length > (state.Index + 1) &&
-                            state.Data[state.Index + 1] == '~' &&
-                            (flags & NEAR_OPERATOR) != 0)
-                        {
-                            state.Index++;
-                            // check for characters after the tilde
-                            if (state.Length > (state.Index + 1))
-                            {
-                                hasSlop = true;
-                            }
-                            break;
-                        }
-                        else
-                        {
-                            // this should be the end of the phrase
-                            // all characters found will be used for
-                            // creating the phrase query
-                            break;
-                        }
-                    }
-                }
-
-                escaped = false;
-                state.Buffer[copied++] = state.Data[state.Index++];
-            }
-
-            if (state.Index == state.Length)
-            {
-                // a closing double quote was never found so the opening
-                // double quote is considered extraneous and will be ignored
-                state.Index = start;
-            }
-            else if (state.Index == start)
-            {
-                // a closing double quote was found immediately after the opening
-                // double quote so the current operation is reset since it would
-                // have been applied to this phrase
-                state.CurrentOperationIsSet = false;
-
-                ++state.Index;
-            }
-            else
-            {
-                // a complete phrase has been found and is parsed
-                // through the analyzer from the given field
-                string phrase = new string(state.Buffer, 0, copied);
-                Query branch;
-                if (hasSlop)
-                {
-                    branch = NewPhraseQuery(phrase, ParseFuzziness(state));
-                }
-                else
-                {
-                    branch = NewPhraseQuery(phrase, 0);
-                }
-                BuildQueryTree(state, branch);
-
-                ++state.Index;
-            }
-        }
-
-        private void ConsumeToken(State state)
-        {
-            int copied = 0;
-            bool escaped = false;
-            bool prefix = false;
-            bool fuzzy = false;
-
-            while (state.Index < state.Length)
-            {
-                if (!escaped)
-                {
-                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
-                    {
-                        // an escape character has been found so
-                        // whatever character is next will become
-                        // part of the term unless the escape
-                        // character is the last one in the data
-                        escaped = true;
-                        prefix = false;
-                        ++state.Index;
-
-                        continue;
-                    }
-                    else if (TokenFinished(state))
-                    {
-                        // this should be the end of the term
-                        // all characters found will be used for
-                        // creating the term query
-                        break;
-                    }
-                    else if (copied > 0 && state.Data[state.Index] == '~' && (flags & FUZZY_OPERATOR) != 0)
-                    {
-                        fuzzy = true;
-                        break;
-                    }
-
-                    // wildcard tracks whether or not the last character
-                    // was a '*' operator that hasn't been escaped
-                    // there must be at least one valid character before
-                    // searching for a prefixed set of terms
-                    prefix = copied > 0 && state.Data[state.Index] == '*' && (flags & PREFIX_OPERATOR) != 0;
-                }
-
-                escaped = false;
-                state.Buffer[copied++] = state.Data[state.Index++];
-            }
-
-            if (copied > 0)
-            {
-                Query branch;
-
-                if (fuzzy && (flags & FUZZY_OPERATOR) != 0)
-                {
-                    string token = new string(state.Buffer, 0, copied);
-                    int fuzziness = ParseFuzziness(state);
-                    // edit distance has a maximum, limit to the maximum supported
-                    fuzziness = Math.Min(fuzziness, LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
-                    if (fuzziness == 0)
-                    {
-                        branch = NewDefaultQuery(token);
-                    }
-                    else
-                    {
-                        branch = NewFuzzyQuery(token, fuzziness);
-                    }
-                }
-                else if (prefix)
-                {
-                    // if a term is found with a closing '*' it is considered to be a prefix query
-                    // and will have prefix added as an option
-                    string token = new string(state.Buffer, 0, copied - 1);
-                    branch = NewPrefixQuery(token);
-                }
-                else
-                {
-                    // a standard term has been found so it will be run through
-                    // the entire analysis chain from the specified schema field
-                    string token = new string(state.Buffer, 0, copied);
-                    branch = NewDefaultQuery(token);
-                }
-
-                BuildQueryTree(state, branch);
-            }
-        }
-
-        /// <summary>
-        /// buildQueryTree should be called after a term, phrase, or subquery
-        /// is consumed to be added to our existing query tree
-        /// this method will only add to the existing tree if the branch contained in state is not null
-        /// </summary>
-        /// <param name="state"></param>
-        /// <param name="branch"></param>
-        private void BuildQueryTree(State state, Query branch)
-        {
-            if (branch != null)
-            {
-                // modify our branch to a BooleanQuery wrapper for not
-                // this is necessary any time a term, phrase, or subquery is negated
-                if (state.Not % 2 == 1)
-                {
-                    BooleanQuery nq = new BooleanQuery();
-                    nq.Add(branch, BooleanClause.Occur.MUST_NOT);
-                    nq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
-                    branch = nq;
-                }
-
-                // first term (or phrase or subquery) found and will begin our query tree
-                if (state.Top == null)
-                {
-                    state.Top = branch;
-                }
-                else
-                {
-                    // more than one term (or phrase or subquery) found
-                    // set currentOperation to the default if no other operation is explicitly set
-                    if (!state.CurrentOperationIsSet)
-                    {
-                        state.CurrentOperation = defaultOperator;
-                    }
-
-                    // operational change requiring a new parent node
-                    // this occurs if the previous operation is not the same as current operation
-                    // because the previous operation must be evaluated separately to preserve
-                    // the proper precedence and the current operation will take over as the top of the tree
-                    if (!state.PreviousOperationIsSet || state.PreviousOperation != state.CurrentOperation)
-                    {
-                        BooleanQuery bq = new BooleanQuery();
-                        bq.Add(state.Top, state.CurrentOperation);
-                        state.Top = bq;
-                    }
-
-                    // reset all of the state for reuse
-                    ((BooleanQuery)state.Top).Add(branch, state.CurrentOperation);
-                    state.PreviousOperation = state.CurrentOperation;
-                }
-
-                // reset the current operation as it was intended to be applied to
-                // the incoming term (or phrase or subquery) even if branch was null
-                // due to other possible errors
-                state.CurrentOperationIsSet = false;
-            }
-        }
-
-        /// <summary>
-        /// Helper parsing fuzziness from parsing state
-        /// </summary>
-        /// <param name="state"></param>
-        /// <returns>slop/edit distance, 0 in the case of non-parsing slop/edit string</returns>
-        private int ParseFuzziness(State state)
-        {
-            char[] slopText = new char[state.Length];
-            int slopLength = 0;
-
-            if (state.Data[state.Index] == '~')
-            {
-                while (state.Index < state.Length)
-                {
-                    state.Index++;
-                    // it's possible that the ~ was at the end, so check after incrementing
-                    // to make sure we don't go out of bounds
-                    if (state.Index < state.Length)
-                    {
-                        if (TokenFinished(state))
-                        {
-                            break;
-                        }
-                        slopText[slopLength] = state.Data[state.Index];
-                        slopLength++;
-                    }
-                }
-                int fuzziness = 0;
-                int.TryParse(new string(slopText, 0, slopLength), out fuzziness);
-                // negative -> 0
-                if (fuzziness < 0)
-                {
-                    fuzziness = 0;
-                }
-                return fuzziness;
-            }
-            return 0;
-        }
-
-        /// <summary>
-        /// Helper returning true if the state has reached the end of token.
-        /// </summary>
-        /// <param name="state"></param>
-        /// <returns></returns>
-        private bool TokenFinished(State state)
-        {
-            if ((state.Data[state.Index] == '"' && (flags & PHRASE_OPERATOR) != 0)
-                || (state.Data[state.Index] == '|' && (flags & OR_OPERATOR) != 0)
-                || (state.Data[state.Index] == '+' && (flags & AND_OPERATOR) != 0)
-                || (state.Data[state.Index] == '(' && (flags & PRECEDENCE_OPERATORS) != 0)
-                || (state.Data[state.Index] == ')' && (flags & PRECEDENCE_OPERATORS) != 0)
-                || ((state.Data[state.Index] == ' '
-                || state.Data[state.Index] == '\t'
-                || state.Data[state.Index] == '\n'
-                || state.Data[state.Index] == '\r') && (flags & WHITESPACE_OPERATOR) != 0))
-            {
-                return true;
-            }
-            return false;
-        }
-
-        /// <summary>
-        /// Factory method to generate a standard query (no phrase or prefix operators).
-        /// </summary>
-        /// <param name="text"></param>
-        /// <returns></returns>
-        protected virtual Query NewDefaultQuery(string text)
-        {
-            BooleanQuery bq = new BooleanQuery(true);
-            foreach (var entry in weights)
-            {
-                Query q = CreateBooleanQuery(entry.Key, text, defaultOperator);
-                if (q != null)
-                {
-                    q.Boost = entry.Value;
-                    bq.Add(q, BooleanClause.Occur.SHOULD);
-                }
-            }
-            return Simplify(bq);
-        }
-
-        /// <summary>
-        /// Factory method to generate a fuzzy query.
-        /// </summary>
-        /// <param name="text"></param>
-        /// <param name="fuzziness"></param>
-        /// <returns></returns>
-        protected virtual Query NewFuzzyQuery(string text, int fuzziness)
-        {
-            BooleanQuery bq = new BooleanQuery(true);
-            foreach (var entry in weights)
-            {
-                Query q = new FuzzyQuery(new Term(entry.Key, text), fuzziness);
-                if (q != null)
-                {
-                    q.Boost = entry.Value;
-                    bq.Add(q, BooleanClause.Occur.SHOULD);
-                }
-            }
-            return Simplify(bq);
-        }
-
-        /// <summary>
-        /// Factory method to generate a phrase query with slop.
-        /// </summary>
-        /// <param name="text"></param>
-        /// <param name="slop"></param>
-        /// <returns></returns>
-        protected virtual Query NewPhraseQuery(string text, int slop)
-        {
-            BooleanQuery bq = new BooleanQuery(true);
-            foreach (var entry in weights)
-            {
-                Query q = CreatePhraseQuery(entry.Key, text, slop);
-                if (q != null)
-                {
-                    q.Boost = entry.Value;
-                    bq.Add(q, BooleanClause.Occur.SHOULD);
-                }
-            }
-            return Simplify(bq);
-        }
-
-        /// <summary>
-        /// Factory method to generate a prefix query.
-        /// </summary>
-        /// <param name="text"></param>
-        /// <returns></returns>
-        protected virtual Query NewPrefixQuery(string text)
-        {
-            BooleanQuery bq = new BooleanQuery(true);
-            foreach (var entry in weights)
-            {
-                PrefixQuery prefix = new PrefixQuery(new Term(entry.Key, text));
-                prefix.Boost = entry.Value;
-                bq.Add(prefix, BooleanClause.Occur.SHOULD);
-            }
-            return Simplify(bq);
-        }
-
-        /// <summary>
-        /// Helper to simplify boolean queries with 0 or 1 clause
-        /// </summary>
-        /// <param name="bq"></param>
-        /// <returns></returns>
-        protected virtual Query Simplify(BooleanQuery bq)
-        {
-            if (!bq.Clauses.Any())
-            {
-                return null;
-            }
-            else if (bq.Clauses.Length == 1)
-            {
-                return bq.Clauses[0].Query;
-            }
-            else
-            {
-                return bq;
-            }
-        }
-
-        /// <summary>
-        /// Gets or Sets the implicit operator setting, which will be
-        /// either {@code SHOULD} or {@code MUST}.
-        /// </summary>
-        public virtual BooleanClause.Occur DefaultOperator
-        {
-            get { return defaultOperator; }
-            set { defaultOperator = value; }
-        }
-
-
-        public class State
-        {
-            //private readonly char[] data;   // the characters in the query string
-            //private readonly char[] buffer; // a temporary buffer used to reduce necessary allocations
-            //private int index;
-            //private int length;
-
-            private BooleanClause.Occur currentOperation;
-            private BooleanClause.Occur previousOperation;
-            //private int not;
-
-            //private Query top;
-
-            internal State(char[] data, char[] buffer, int index, int length)
-            {
-                this.Data = data;
-                this.Buffer = buffer;
-                this.Index = index;
-                this.Length = length;
-            }
-
-            public char[] Data { get; protected set; } // the characters in the query string
-            public char[] Buffer { get; protected set; } // a temporary buffer used to reduce necessary allocations
-            public int Index { get; set; }
-            public int Length { get; protected set; }
-
-            public BooleanClause.Occur CurrentOperation 
-            {
-                get 
-                { 
-                    return currentOperation; 
-                }
-                set
-                {
-                    currentOperation = value;
-                    CurrentOperationIsSet = true;
-                }
-            }
-
-            public BooleanClause.Occur PreviousOperation
-            {
-                get
-                {
-                    return previousOperation;
-                }
-                set
-                {
-                    previousOperation = value;
-                    PreviousOperationIsSet = true;
-                }
-            }
-
-            public bool CurrentOperationIsSet { get; set; }
-            public bool PreviousOperationIsSet { get; set; }
-
-            public int Not { get; set; }
-            public Query Top { get; set; }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs b/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
deleted file mode 100644
index bfb2fc2..0000000
--- a/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
+++ /dev/null
@@ -1,134 +0,0 @@
-\ufeffusing System;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-
-    /// <summary> 
-    /// This interface describes a character stream that maintains line and
-    /// column number positions of the characters.  It also has the capability
-    /// to backup the stream to some extent.  An implementation of this
-    /// interface is used in the TokenManager implementation generated by
-    /// JavaCCParser.
-    /// 
-    /// All the methods except backup can be implemented in any fashion. backup
-    /// needs to be implemented correctly for the correct operation of the lexer.
-    /// Rest of the methods are all used to get information like line number,
-    /// column number and the String that constitutes a token and are not used
-    /// by the lexer. Hence their implementation won't affect the generated lexer's
-    /// operation.
-    /// </summary>
-    public interface ICharStream
-    {
-        /// <summary> 
-        /// Returns the next character from the selected input.  The method
-        /// of selecting the input is the responsibility of the class
-        /// implementing this interface.  Can throw any java.io.IOException.
-        /// </summary>
-        char ReadChar();
-
-        /// <summary>
-        /// Returns the column position of the character last read.
-        /// </summary>
-        /// <deprecated>
-        /// </deprecated>
-        /// <seealso cref="EndColumn">
-        /// </seealso>
-        [Obsolete]
-        int Column { get; }
-
-        /// <summary>
-        /// Returns the line number of the character last read.
-        /// </summary>
-        /// <deprecated>
-        /// </deprecated>
-        /// <seealso cref="EndLine">
-        /// </seealso>
-        [Obsolete]
-        int Line { get; }
-
-        /// <summary>
-        /// Returns the column number of the last character for current token (being
-        /// matched after the last call to BeginToken).
-        /// </summary>
-        int EndColumn { get; }
-
-        /// <summary> 
-        /// Returns the line number of the last character for current token (being
-        /// matched after the last call to BeginToken).
-        /// </summary>
-        int EndLine { get; }
-
-        /// <summary> 
-        /// Returns the column number of the first character for current token (being
-        /// matched after the last call to BeginToken).
-        /// </summary>
-        int BeginColumn { get; }
-
-        /// <summary> 
-        /// Returns the line number of the first character for current token (being
-        /// matched after the last call to BeginToken).
-        /// </summary>
-        int BeginLine { get; }
-
-        /// <summary> 
-        /// Backs up the input stream by amount steps. Lexer calls this method if it
-        /// had already read some characters, but could not use them to match a
-        /// (longer) token. So, they will be used again as the prefix of the next
-        /// token and it is the implementation's responsibility to do this right.
-        /// </summary>
-        void Backup(int amount);
-
-        /// <summary> 
-        /// Returns the next character that marks the beginning of the next token.
-        /// All characters must remain in the buffer between two successive calls
-        /// to this method to implement backup correctly.
-        /// </summary>
-        char BeginToken();
-
-        /// <summary> 
-        /// Returns a string made up of characters from the marked token beginning
-        /// to the current buffer position. Implementations have the choice of returning
-        /// anything that they want to. For example, for efficiency, one might decide
-        /// to just return null, which is a valid implementation.
-        /// </summary>
-        string Image { get; }
-
-        /// <summary> 
-        /// Returns an array of characters that make up the suffix of length 'len' for
-        /// the currently matched token. This is used to build up the matched string
-        /// for use in actions in the case of MORE. A simple and inefficient
-        /// implementation of this is as follows :
-        /// 
-        /// {
-        /// String t = GetImage();
-        /// return t.substring(t.length() - len, t.length()).toCharArray();
-        /// }
-        /// </summary>
-        char[] GetSuffix(int len);
-
-        /// <summary> 
-        /// The lexer calls this function to indicate that it is done with the stream
-        /// and hence implementations can free any resources held by this class.
-        /// Again, the body of this function can be just empty and it will not
-        /// affect the lexer's operation.
-        /// </summary>
-        void Done();
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs b/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
deleted file mode 100644
index b33bd83..0000000
--- a/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
+++ /dev/null
@@ -1,158 +0,0 @@
-\ufeffusing System;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// An efficient implementation of JavaCC's CharStream interface.  <p/>Note that
-    /// this does not do line-number counting, but instead keeps track of the
-    /// character position of the token in the input, as required by Lucene's <see cref="Lucene.Net.Analysis.Token" />
-    /// API.
-    /// </summary>
-    public sealed class FastCharStream : ICharStream
-    {
-        internal char[] buffer = null;
-
-        internal int bufferLength = 0; // end of valid chars
-        internal int bufferPosition = 0; // next char to read
-
-        internal int tokenStart = 0; // offset in buffer
-        internal int bufferStart = 0; // position in file of buffer
-
-        internal System.IO.TextReader input; // source of chars
-
-        /// <summary>
-        /// Constructs from a Reader. 
-        /// </summary>
-        public FastCharStream(System.IO.TextReader r)
-        {
-            input = r;
-        }
-
-        public char ReadChar()
-        {
-            if (bufferPosition >= bufferLength)
-                Refill();
-            return buffer[bufferPosition++];
-        }
-
-        private void Refill()
-        {
-            int newPosition = bufferLength - tokenStart;
-
-            if (tokenStart == 0)
-            {
-                // token won't fit in buffer
-                if (buffer == null)
-                {
-                    // first time: alloc buffer
-                    buffer = new char[2048];
-                }
-                else if (bufferLength == buffer.Length)
-                {
-                    // grow buffer
-                    char[] newBuffer = new char[buffer.Length * 2];
-                    Array.Copy(buffer, 0, newBuffer, 0, bufferLength);
-                    buffer = newBuffer;
-                }
-            }
-            else
-            {
-                // shift token to front
-                Array.Copy(buffer, tokenStart, buffer, 0, newPosition);
-            }
-
-            bufferLength = newPosition; // update state
-            bufferPosition = newPosition;
-            bufferStart += tokenStart;
-            tokenStart = 0;
-
-            int charsRead = input.Read(buffer, newPosition, buffer.Length - newPosition);
-            if (charsRead <= 0)
-                throw new System.IO.IOException("read past eof");
-            else
-                bufferLength += charsRead;
-        }
-
-        public char BeginToken()
-        {
-            tokenStart = bufferPosition;
-            return ReadChar();
-        }
-
-        public void Backup(int amount)
-        {
-            bufferPosition -= amount;
-        }
-
-        public string Image
-        {
-            get { return new System.String(buffer, tokenStart, bufferPosition - tokenStart); }
-        }
-
-        public char[] GetSuffix(int len)
-        {
-            char[] value_Renamed = new char[len];
-            Array.Copy(buffer, bufferPosition - len, value_Renamed, 0, len);
-            return value_Renamed;
-        }
-
-        public void Done()
-        {
-            try
-            {
-                input.Close();
-            }
-            catch (System.IO.IOException e)
-            {
-                System.Console.Error.WriteLine("Caught: " + e + "; ignoring.");
-            }
-        }
-
-        public int Column
-        {
-            get { return bufferStart + bufferPosition; }
-        }
-
-        public int Line
-        {
-            get { return 1; }
-        }
-
-        public int EndColumn
-        {
-            get { return bufferStart + bufferPosition; }
-        }
-
-        public int EndLine
-        {
-            get { return 1; }
-        }
-
-        public int BeginColumn
-        {
-            get { return bufferStart + tokenStart; }
-        }
-
-        public int BeginLine
-        {
-            get { return 1; }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs b/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
deleted file mode 100644
index 1716658..0000000
--- a/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
+++ /dev/null
@@ -1,234 +0,0 @@
-\ufeffusing System;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary> 
-    /// This exception is thrown when parse errors are encountered.
-    /// You can explicitly create objects of this exception type by
-    /// calling the method GenerateParseException in the generated
-    /// parser.
-    /// 
-    /// You can modify this class to customize your error reporting
-    /// mechanisms so long as you retain the public fields.
-    /// </summary>
-    [Serializable]
-    public class ParseException : Exception
-    {
-        /// <summary>
-        /// This constructor is used by the method "GenerateParseException"
-        /// in the generated parser.  Calling this constructor generates
-        /// a new object of this type with the fields "currentToken",
-        /// "expectedTokenSequences", and "tokenImage" set.
-        /// </summary>
-        /// <param name="currentTokenVal"></param>
-        /// <param name="expectedTokenSequencesVal"></param>
-        /// <param name="tokenImageVal"></param>
-        public ParseException(Token currentTokenVal,
-                        int[][] expectedTokenSequencesVal,
-                        string[] tokenImageVal)
-            : base(Initialize(currentTokenVal, expectedTokenSequencesVal, tokenImageVal))
-        {
-            currentToken = currentTokenVal;
-            expectedTokenSequences = expectedTokenSequencesVal;
-            tokenImage = tokenImageVal;
-        }
-
-        /**
-         * The following constructors are for use by you for whatever
-         * purpose you can think of.  Constructing the exception in this
-         * manner makes the exception behave in the normal way - i.e., as
-         * documented in the class "Throwable".  The fields "errorToken",
-         * "expectedTokenSequences", and "tokenImage" do not contain
-         * relevant information.  The JavaCC generated code does not use
-         * these constructors.
-         */
-
-        public ParseException()
-        { }
-
-        public ParseException(string message)
-            : base(message)
-        { }
-
-        public ParseException(string message, Exception innerException)
-            : base(message, innerException)
-        { }
-
-
-        /// <summary> 
-        /// This is the last token that has been consumed successfully.  If
-        /// this object has been created due to a parse error, the token
-        /// following this token will (therefore) be the first error token.
-        /// </summary>
-        public Token currentToken;
-
-        /// <summary> 
-        /// Each entry in this array is an array of integers.  Each array
-        /// of integers represents a sequence of tokens (by their ordinal
-        /// values) that is expected at this point of the parse.
-        /// </summary>
-        public int[][] expectedTokenSequences;
-
-        /// <summary> 
-        /// This is a reference to the "tokenImage" array of the generated
-        /// parser within which the parse error occurred.  This array is
-        /// defined in the generated ...Constants interface.
-        /// </summary>
-        public string[] tokenImage;
-
-
-        /// <summary>
-        /// It uses "currentToken" and "expectedTokenSequences" to generate a parse
-        /// error message and returns it.  If this object has been created
-        /// due to a parse error, and you do not catch it (it gets thrown
-        /// from the parser) the correct error message
-        /// gets displayed.
-        /// </summary>
-        /// <param name="currentToken"></param>
-        /// <param name="expectedTokenSequences"></param>
-        /// <param name="tokenImage"></param>
-        /// <returns></returns>
-        private static string Initialize(Token currentToken,
-            int[][] expectedTokenSequences,
-            string[] tokenImage)
-        {
-
-            StringBuilder expected = new StringBuilder();
-            int maxSize = 0;
-            for (int i = 0; i < expectedTokenSequences.Length; i++)
-            {
-                if (maxSize < expectedTokenSequences[i].Length)
-                {
-                    maxSize = expectedTokenSequences[i].Length;
-                }
-                for (int j = 0; j < expectedTokenSequences[i].Length; j++)
-                {
-                    expected.Append(tokenImage[expectedTokenSequences[i][j]]).Append(' ');
-                }
-                if (expectedTokenSequences[i][expectedTokenSequences[i].Length - 1] != 0)
-                {
-                    expected.Append("...");
-                }
-                expected.Append(eol).Append("    ");
-            }
-            string retval = "Encountered \"";
-            Token tok = currentToken.next;
-            for (int i = 0; i < maxSize; i++)
-            {
-                if (i != 0)
-                    retval += " ";
-                if (tok.kind == 0)
-                {
-                    retval += tokenImage[0];
-                    break;
-                }
-                retval += (" " + tokenImage[tok.kind]);
-                retval += " \"";
-                retval += Add_escapes(tok.image);
-                retval += " \"";
-                tok = tok.next;
-            }
-            retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
-            retval += ("." + eol);
-            if (expectedTokenSequences.Length == 1)
-            {
-                retval += ("Was expecting:" + eol + "    ");
-            }
-            else
-            {
-                retval += ("Was expecting one of:" + eol + "    ");
-            }
-            retval += expected.ToString();
-            return retval;
-        }
-
-        /// <summary> 
-        /// The end of line string for this machine.
-        /// </summary>
-        protected static string eol = Environment.NewLine;
-
-        /// <summary> 
-        /// Used to convert raw characters to their escaped version
-        /// when these raw version cannot be used as part of an ASCII
-        /// string literal.
-        /// </summary>
-        internal static string Add_escapes(string str)
-        {
-            StringBuilder retval = new StringBuilder();
-            char ch;
-            for (int i = 0; i < str.Length; i++)
-            {
-                switch (str[i])
-                {
-
-                    case (char)(0):
-                        continue;
-
-                    case '\b':
-                        retval.Append("\\b");
-                        continue;
-
-                    case '\t':
-                        retval.Append("\\t");
-                        continue;
-
-                    case '\n':
-                        retval.Append("\\n");
-                        continue;
-
-                    case '\f':
-                        retval.Append("\\f");
-                        continue;
-
-                    case '\r':
-                        retval.Append("\\r");
-                        continue;
-
-                    case '\"':
-                        retval.Append("\\\"");
-                        continue;
-
-                    case '\'':
-                        retval.Append("\\\'");
-                        continue;
-
-                    case '\\':
-                        retval.Append("\\\\");
-                        continue;
-
-                    default:
-                        if ((ch = str[i]) < 0x20 || ch > 0x7e)
-                        {
-                            System.String s = "0000" + System.Convert.ToString(ch, 16);
-                            retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
-                        }
-                        else
-                        {
-                            retval.Append(ch);
-                        }
-                        continue;
-
-                }
-            }
-            return retval.ToString();
-        }
-    }
-}
\ No newline at end of file


[09/50] [abbrv] lucenenet git commit: Fixed Substring bugs because of the difference between the Java and .NET Substring function.

Posted by sy...@apache.org.
Fixed Substring bugs because of the difference between the Java and .NET Substring function.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/11ecedc6
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/11ecedc6
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/11ecedc6

Branch: refs/heads/master
Commit: 11ecedc63aa53beed200d573357d9fc56b6987fb
Parents: 4e04b59
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 21:07:05 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:10 2016 +0700

----------------------------------------------------------------------
 Lucene.Net.QueryParser/Classic/QueryParser.cs           | 4 ++--
 Lucene.Net.QueryParser/Classic/QueryParserBase.cs       | 4 ++--
 Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11ecedc6/Lucene.Net.QueryParser/Classic/QueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParser.cs b/Lucene.Net.QueryParser/Classic/QueryParser.cs
index f8b414f..e86c716 100644
--- a/Lucene.Net.QueryParser/Classic/QueryParser.cs
+++ b/Lucene.Net.QueryParser/Classic/QueryParser.cs
@@ -485,7 +485,7 @@ namespace Lucene.Net.QueryParser.Classic
                     bool endOpen = false;
                     if (goop1.kind == RegexpToken.RANGE_QUOTED)
                     {
-                        goop1.image = goop1.image.Substring(1, goop1.image.Length - 1);
+                        goop1.image = goop1.image.Substring(1, goop1.image.Length - 2);
                     }
                     else if ("*".Equals(goop1.image))
                     {
@@ -493,7 +493,7 @@ namespace Lucene.Net.QueryParser.Classic
                     }
                     if (goop2.kind == RegexpToken.RANGE_QUOTED)
                     {
-                        goop2.image = goop2.image.Substring(1, goop2.image.Length - 1);
+                        goop2.image = goop2.image.Substring(1, goop2.image.Length - 2);
                     }
                     else if ("*".Equals(goop2.image))
                     {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11ecedc6/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
index 8445c38..0449187 100644
--- a/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
+++ b/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
@@ -817,7 +817,7 @@ namespace Lucene.Net.QueryParser.Classic
             }
             else if (regexp)
             {
-                q = GetRegexpQuery(qfield, term.image.Substring(1, term.image.Length - 1));
+                q = GetRegexpQuery(qfield, term.image.Substring(1, term.image.Length - 2));
             }
             else if (fuzzy)
             {
@@ -863,7 +863,7 @@ namespace Lucene.Net.QueryParser.Classic
                 }
                 catch (Exception ignored) { }
             }
-            return GetFieldQuery(qfield, DiscardEscapeChar(term.image.Substring(1, term.image.Length - 1)), s);
+            return GetFieldQuery(qfield, DiscardEscapeChar(term.image.Substring(1, term.image.Length - 2)), s);
         }
 
         // extracted from the .jj grammar

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11ecedc6/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index 495391a..235c8ae 100644
--- a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -184,7 +184,7 @@ namespace Lucene.Net.QueryParser.Classic
                     float fms = FuzzyMinSim;
                     try
                     {
-                        fms = float.Parse(fuzzySlop.image.Substring(1, fuzzySlop.image.Length - 1));
+                        fms = float.Parse(fuzzySlop.image.Substring(1, fuzzySlop.image.Length - 2));
                     }
                     catch (Exception ignored) { }
                     float value = float.Parse(termImage);


[49/50] [abbrv] lucenenet git commit: Fixed merge conflict

Posted by sy...@apache.org.
Fixed merge conflict


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/ab5c0595
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/ab5c0595
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/ab5c0595

Branch: refs/heads/master
Commit: ab5c0595b64bdcaab41a604552f10dfa695e1f78
Parents: 927b5a2 7d5c11d
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Thu Sep 8 17:50:26 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Thu Sep 8 17:50:26 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/ab5c0595/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --cc src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
index cc18216,49ef7d4..007ece1
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@@ -540,7 -542,7 +539,6 @@@ namespace Lucene.Net.QueryParser.Surrou
          public void OptionalWeights(SrndQuery q)
          {
              Token weight = null;
- 
 -        
              while (true)
              {
                  switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)


[31/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs b/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
deleted file mode 100644
index d421ad6..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
+++ /dev/null
@@ -1,144 +0,0 @@
-\ufeffusing System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Base class for composite queries (such as AND/OR/NOT)
-    /// </summary>
-    public abstract class ComposedQuery : SrndQuery
-    {
-        public ComposedQuery(IEnumerable<SrndQuery> qs, bool operatorInfix, string opName)
-        {
-            Recompose(qs);
-            this.operatorInfix = operatorInfix;
-            this.opName = opName;
-        }
-
-        protected virtual void Recompose(IEnumerable<SrndQuery> queries)
-        {
-            if (queries.Count() < 2) throw new InvalidOperationException("Too few subqueries");
-            this.queries = new List<SrndQuery>(queries);
-        }
-
-        protected string opName;
-        public virtual string OperatorName { get { return opName; } }
-
-        protected IList<SrndQuery> queries;
-
-        public virtual IEnumerator<SrndQuery> GetSubQueriesEnumerator()
-        {
-            return queries.GetEnumerator();
-        }
-
-        public virtual int NrSubQueries { get { return queries.Count; } }
-
-        public virtual SrndQuery GetSubQuery(int qn) { return queries[qn]; }
-
-        private bool operatorInfix;
-        public virtual bool IsOperatorInfix { get { return operatorInfix; } } /* else prefix operator */
-
-        public IEnumerable<Search.Query> MakeLuceneSubQueriesField(string fn, BasicQueryFactory qf)
-        {
-            List<Search.Query> luceneSubQueries = new List<Search.Query>();
-            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
-            while (sqi.MoveNext())
-            {
-                luceneSubQueries.Add((sqi.Current).MakeLuceneQueryField(fn, qf));
-            }
-            return luceneSubQueries;
-        }
-
-        public override string ToString()
-        {
-            StringBuilder r = new StringBuilder();
-            if (IsOperatorInfix)
-            {
-                InfixToString(r);
-            }
-            else
-            {
-                PrefixToString(r);
-            }
-            WeightToString(r);
-            return r.ToString();
-        }
-
-        // Override for different spacing
-        protected virtual string PrefixSeparator { get { return ", "; } }
-        protected virtual string BracketOpen { get { return "("; } }
-        protected virtual string BracketClose { get { return ")"; } }
-
-        protected virtual void InfixToString(StringBuilder r)
-        {
-            /* Brackets are possibly redundant in the result. */
-            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
-            r.Append(BracketOpen);
-            if (sqi.MoveNext())
-            {
-                r.Append(sqi.Current.ToString());
-                while (sqi.MoveNext())
-                {
-                    r.Append(" ");
-                    r.Append(OperatorName); /* infix operator */
-                    r.Append(" ");
-                    r.Append(sqi.Current.ToString());
-                }
-            }
-            r.Append(BracketClose);
-        }
-
-        protected virtual void PrefixToString(StringBuilder r)
-        {
-            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
-            r.Append(OperatorName); /* prefix operator */
-            r.Append(BracketOpen);
-            if (sqi.MoveNext())
-            {
-                r.Append(sqi.Current.ToString());
-                while (sqi.MoveNext())
-                {
-                    r.Append(PrefixSeparator);
-                    r.Append(sqi.Current.ToString());
-                }
-            }
-            r.Append(BracketClose);
-        }
-
-        public override bool IsFieldsSubQueryAcceptable
-        {
-            get
-            {
-                /* at least one subquery should be acceptable */
-                IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
-                while (sqi.MoveNext())
-                {
-                    if ((sqi.Current).IsFieldsSubQueryAcceptable)
-                    {
-                        return true;
-                    }
-                }
-                return false;
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs b/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
deleted file mode 100644
index 1ca7a01..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
+++ /dev/null
@@ -1,117 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Search.Spans;
-using System;
-using System.Collections.Generic;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Factory for NEAR queries 
-    /// </summary>
-    public class DistanceQuery : ComposedQuery, IDistanceSubQuery
-    {
-        public DistanceQuery(
-            IEnumerable<SrndQuery> queries,
-            bool infix,
-            int opDistance,
-            string opName,
-            bool ordered)
-            : base(queries, infix, opName)
-        {
-            this.opDistance = opDistance; /* the distance indicated in the operator */
-            this.ordered = ordered;
-        }
-
-        private int opDistance;
-        public virtual int OpDistance { get { return opDistance; } }
-
-        private bool ordered;
-        public virtual bool QueriesOrdered { get { return ordered; } }
-
-
-        public virtual string DistanceSubQueryNotAllowed()
-        {
-            var sqi = GetSubQueriesEnumerator();
-            while (sqi.MoveNext())
-            {
-                var dsq = sqi.Current as IDistanceSubQuery;
-                if (dsq != null)
-                {
-                    string m = dsq.DistanceSubQueryNotAllowed();
-                    if (m != null)
-                    {
-                        return m;
-                    }
-                }
-                else
-                {
-                    return "Operator " + OperatorName + " does not allow subquery " + dsq.ToString();
-                }
-            }
-            return null; /* subqueries acceptable */
-        }
-
-        public virtual void AddSpanQueries(SpanNearClauseFactory sncf)
-        {
-            Search.Query snq = GetSpanNearQuery(sncf.IndexReader,
-                                  sncf.FieldName,
-                                  Weight,
-                                  sncf.BasicQueryFactory);
-            sncf.AddSpanQuery(snq);
-        }
-
-        public Search.Query GetSpanNearQuery(
-            IndexReader reader,
-            String fieldName,
-            float boost,
-            BasicQueryFactory qf)
-        {
-            SpanQuery[] spanClauses = new SpanQuery[NrSubQueries];
-            var sqi = GetSubQueriesEnumerator();
-            int qi = 0;
-            while (sqi.MoveNext())
-            {
-                SpanNearClauseFactory sncf = new SpanNearClauseFactory(reader, fieldName, qf);
-
-                ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
-                if (sncf.Count == 0)
-                { /* distance operator requires all sub queries */
-                    while (sqi.MoveNext())
-                    { /* produce evt. error messages but ignore results */
-                        ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
-                        sncf.Clear();
-                    }
-                    return SrndQuery.TheEmptyLcnQuery;
-                }
-
-                spanClauses[qi] = sncf.MakeSpanClause();
-                qi++;
-            }
-            SpanNearQuery r = new SpanNearQuery(spanClauses, OpDistance - 1, QueriesOrdered);
-            r.Boost = boost;
-            return r;
-        }
-
-        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
-        {
-            return new DistanceRewriteQuery(this, fieldName, qf);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs b/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
deleted file mode 100644
index 3d3a108..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
+++ /dev/null
@@ -1,35 +0,0 @@
-\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    internal class DistanceRewriteQuery : RewriteQuery<DistanceQuery>
-    {
-        public DistanceRewriteQuery(
-            DistanceQuery srndQuery,
-            string fieldName,
-            BasicQueryFactory qf)
-            : base(srndQuery, fieldName, qf)
-        {
-        }
-
-        public override Search.Query Rewrite(Index.IndexReader reader)
-        {
-            return srndQuery.GetSpanNearQuery(reader, fieldName, Boost, qf);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs b/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
deleted file mode 100644
index 639f9e0..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Interface for queries that can be nested as subqueries
-    /// into a span near.
-    /// </summary>
-    public interface IDistanceSubQuery
-    {
-        /// <summary>
-        /// When distanceSubQueryNotAllowed() returns non null, the reason why the subquery
-        /// is not allowed as a distance subquery is returned.
-        /// <br>When distanceSubQueryNotAllowed() returns null addSpanNearQueries() can be used
-        /// in the creation of the span near clause for the subquery.
-        /// </summary>
-        string DistanceSubQueryNotAllowed();
-
-        void AddSpanQueries(SpanNearClauseFactory sncf);
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs b/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
deleted file mode 100644
index 912bf36..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
+++ /dev/null
@@ -1,105 +0,0 @@
-\ufeffusing System.Collections.Generic;
-using System.Linq;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Forms an OR query of the provided query across multiple fields.
-    /// </summary>
-    public class FieldsQuery : SrndQuery /* mostly untested */
-    {
-        private SrndQuery q;
-        private IEnumerable<string> fieldNames;
-        private readonly char fieldOp;
-        private readonly string OrOperatorName = "OR"; /* for expanded queries, not normally visible */
-
-        public FieldsQuery(SrndQuery q, IEnumerable<string> fieldNames, char fieldOp)
-        {
-            this.q = q;
-            this.fieldNames = new List<string>(fieldNames);
-            this.fieldOp = fieldOp;
-        }
-
-        public FieldsQuery(SrndQuery q, string fieldName, char fieldOp)
-        {
-            this.q = q;
-            var fieldNameList = new List<string>();
-            fieldNameList.Add(fieldName);
-            this.fieldNames = fieldNameList;
-            this.fieldOp = fieldOp;
-        }
-
-        public override bool IsFieldsSubQueryAcceptable
-        {
-            get { return false; }
-        }
-
-        public Search.Query MakeLuceneQueryNoBoost(BasicQueryFactory qf)
-        {
-            if (fieldNames.Count() == 1)
-            { /* single field name: no new queries needed */
-                return q.MakeLuceneQueryFieldNoBoost(fieldNames.FirstOrDefault(), qf);
-            }
-            else
-            { /* OR query over the fields */
-                List<SrndQuery> queries = new List<SrndQuery>();
-                foreach (var fieldName in fieldNames)
-                {
-                    var qc = (SrndQuery)q.Clone();
-                    queries.Add(new FieldsQuery(qc, fieldName, fieldOp));
-                }
-                OrQuery oq = new OrQuery(queries,
-                                        true /* infix OR for field names */,
-                                        OrOperatorName);
-                // System.out.println(getClass().toString() + ", fields expanded: " + oq.toString()); /* needs testing */
-                return oq.MakeLuceneQueryField(null, qf);
-            }
-        }
-
-        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
-        {
-            return MakeLuceneQueryNoBoost(qf); /* use this.fieldNames instead of fieldName */
-        }
-
-        public virtual IEnumerable<string> FieldNames { get { return fieldNames; } }
-
-        public virtual char FieldOperator { get { return fieldOp; } }
-
-        public override string ToString()
-        {
-            StringBuilder r = new StringBuilder();
-            r.Append("(");
-            FieldNamesToString(r);
-            r.Append(q.ToString());
-            r.Append(")");
-            return r.ToString();
-        }
-
-        protected virtual void FieldNamesToString(StringBuilder r)
-        {
-            foreach (var fieldName in FieldNames)
-            {
-                r.Append(fieldName);
-                r.Append(FieldOperator);
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs b/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
deleted file mode 100644
index 30d40a8..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
+++ /dev/null
@@ -1,48 +0,0 @@
-\ufeffusing Lucene.Net.Search;
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Factory for prohibited clauses
-    /// </summary>
-    public class NotQuery : ComposedQuery
-    {
-        public NotQuery(IEnumerable<SrndQuery> queries, string opName)
-            : base(queries, true /* infix */, opName)
-        {
-        }
-
-        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
-        {
-            var luceneSubQueries = MakeLuceneSubQueriesField(fieldName, qf);
-            BooleanQuery bq = new BooleanQuery();
-            bq.Add(luceneSubQueries.FirstOrDefault(), BooleanClause.Occur.MUST);
-            SrndBooleanQuery.AddQueriesToBoolean(bq,
-                // FIXME: do not allow weights on prohibited subqueries.
-                    //luceneSubQueries.subList(1, luceneSubQueries.size()),
-                    luceneSubQueries.Skip(1).ToList(),
-                // later subqueries: not required, prohibited
-                    BooleanClause.Occur.MUST_NOT);
-            return bq;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs b/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
deleted file mode 100644
index f7d0036..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
+++ /dev/null
@@ -1,71 +0,0 @@
-\ufeffusing Lucene.Net.Search;
-using System.Collections.Generic;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Factory for disjunctions
-    /// </summary>
-    public class OrQuery : ComposedQuery, IDistanceSubQuery
-    {
-        public OrQuery(IEnumerable<SrndQuery> queries, bool infix, string opName)
-            : base(queries, infix, opName)
-        {
-        }
-
-        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
-        {
-            return SrndBooleanQuery.MakeBooleanQuery(
-                /* subqueries can be individually boosted */
-                MakeLuceneSubQueriesField(fieldName, qf), BooleanClause.Occur.SHOULD);
-        }
-
-        public virtual string DistanceSubQueryNotAllowed()
-        {
-            var sqi = GetSubQueriesEnumerator();
-            while (sqi.MoveNext())
-            {
-                SrndQuery leq = sqi.Current;
-                if (leq is IDistanceSubQuery)
-                {
-                    string m = ((IDistanceSubQuery)leq).DistanceSubQueryNotAllowed();
-                    if (m != null)
-                    {
-                        return m;
-                    }
-                }
-                else
-                {
-                    return "subquery not allowed: " + leq.ToString();
-                }
-            }
-            return null;
-        }
-
-        public virtual void AddSpanQueries(SpanNearClauseFactory sncf)
-        {
-            var sqi = GetSubQueriesEnumerator();
-            while (sqi.MoveNext())
-            {
-                ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs b/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
deleted file mode 100644
index 030923f..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
+++ /dev/null
@@ -1,85 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using System;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public abstract class RewriteQuery<SQ> : Search.Query
-    {
-        protected readonly SQ srndQuery;
-        protected readonly string fieldName;
-        protected readonly BasicQueryFactory qf;
-
-        public RewriteQuery(
-            SQ srndQuery,
-            String fieldName,
-            BasicQueryFactory qf)
-        {
-            this.srndQuery = srndQuery;
-            this.fieldName = fieldName;
-            this.qf = qf;
-        }
-
-        public abstract override Search.Query Rewrite(IndexReader reader);
-
-        public override string ToString()
-        {
-            return ToString(null);
-        }
-
-        public override string ToString(string field)
-        {
-            return GetType().Name
-                + (field == null ? "" : "(unused: " + field + ")")
-                + "(" + fieldName
-                + ", " + srndQuery.ToString()
-                + ", " + qf.ToString()
-                + ")";
-        }
-
-        public override int GetHashCode()
-        {
-            return GetType().GetHashCode()
-                ^ fieldName.GetHashCode()
-                ^ qf.GetHashCode()
-                ^ srndQuery.GetHashCode();
-        }
-
-        public override bool Equals(object obj)
-        {
-            if (obj == null)
-                return false;
-            if (!GetType().Equals(obj.GetType()))
-                return false;
-            RewriteQuery<SQ> other = (RewriteQuery<SQ>)obj;
-            return fieldName.Equals(other.fieldName)
-                && qf.Equals(other.qf)
-                && srndQuery.Equals(other.srndQuery);
-        }
-
-        /// <summary>
-        /// Not supported by this query.
-        /// </summary>
-        /// <exception cref="NotSupportedException">throws NotSupportedException always: clone is not supported.</exception>
-        public override object Clone()
-        {
-            throw new NotSupportedException();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs b/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
deleted file mode 100644
index 5e39e03..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
+++ /dev/null
@@ -1,118 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using System;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Base class for queries that expand to sets of simple terms.
-    /// </summary>
-    public abstract class SimpleTerm : SrndQuery, IDistanceSubQuery, IComparable<SimpleTerm>
-    {
-        public SimpleTerm(bool q) 
-        { 
-            quoted = q; 
-        }
-
-        private bool quoted;
-        internal bool IsQuoted { get { return quoted; } }
-
-        public virtual string Quote { get { return "\""; }}
-        public virtual string FieldOperator { get { return "/"; } }
-
-        public abstract string ToStringUnquoted();
-
-        [Obsolete("deprecated (March 2011) Not normally used, to be removed from Lucene 4.0. This class implementing Comparable is to be removed at the same time.")]
-        public int CompareTo(SimpleTerm ost)
-        {
-            /* for ordering terms and prefixes before using an index, not used */
-            return this.ToStringUnquoted().CompareTo(ost.ToStringUnquoted());
-        }
-
-        protected virtual void SuffixToString(StringBuilder r) { } /* override for prefix query */
-
-
-        public override string ToString()
-        {
-            StringBuilder r = new StringBuilder();
-            if (IsQuoted)
-            {
-                r.Append(Quote);
-            }
-            r.Append(ToStringUnquoted());
-            if (IsQuoted)
-            {
-                r.Append(Quote);
-            }
-            SuffixToString(r);
-            WeightToString(r);
-            return r.ToString();
-        }
-
-        public abstract void VisitMatchingTerms(
-                            IndexReader reader,
-                            string fieldName,
-                            IMatchingTermVisitor mtv);
-
-        /// <summary>
-        /// Callback to visit each matching term during "rewrite"
-        /// in <see cref="M:VisitMatchingTerm(Term)"/>
-        /// </summary>
-        public interface IMatchingTermVisitor
-        {
-            void VisitMatchingTerm(Term t);
-        }
-
-        public string DistanceSubQueryNotAllowed()
-        {
-            return null;
-        }
-
-        public void AddSpanQueries(SpanNearClauseFactory sncf)
-        {
-            VisitMatchingTerms(
-                sncf.IndexReader,
-                sncf.FieldName,
-                new AddSpanQueriesMatchingTermVisitor(sncf, Weight));
-        }
-
-        internal class AddSpanQueriesMatchingTermVisitor : IMatchingTermVisitor
-        {
-            private readonly SpanNearClauseFactory sncf;
-            private readonly float weight;
-
-            public AddSpanQueriesMatchingTermVisitor(SpanNearClauseFactory sncf, float weight)
-            {
-                this.sncf = sncf;
-                this.weight = weight;
-            }
-
-            public void VisitMatchingTerm(Term term)
-            {
-                sncf.AddTermWeighted(term, weight);
-            }
-        }
-
-        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
-        {
-            return new SimpleTermRewriteQuery(this, fieldName, qf);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
deleted file mode 100644
index 6502d6c..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
+++ /dev/null
@@ -1,64 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Search;
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    internal class SimpleTermRewriteQuery : RewriteQuery<SimpleTerm>
-    {
-        public  SimpleTermRewriteQuery(
-            SimpleTerm srndQuery,
-            string fieldName,
-            BasicQueryFactory qf)
-            : base(srndQuery, fieldName, qf)
-        {
-        }
-
-        public override Search.Query Rewrite(IndexReader reader)
-        {
-            var luceneSubQueries = new List<Search.Query>();
-            srndQuery.VisitMatchingTerms(reader, fieldName, 
-                new SimpleTermRewriteMatchingTermVisitor(luceneSubQueries, qf));
-            return (luceneSubQueries.Count == 0) ? SrndQuery.TheEmptyLcnQuery
-                : (luceneSubQueries.Count == 1) ? luceneSubQueries.First()
-                : SrndBooleanQuery.MakeBooleanQuery(
-                /* luceneSubQueries all have default weight */
-                luceneSubQueries, BooleanClause.Occur.SHOULD); /* OR the subquery terms */
-        }
-
-        internal class SimpleTermRewriteMatchingTermVisitor : SimpleTerm.IMatchingTermVisitor
-        {
-            private readonly IList<Search.Query> luceneSubQueries;
-            private readonly BasicQueryFactory qf;
-
-            public SimpleTermRewriteMatchingTermVisitor(IList<Search.Query> luceneSubQueries, BasicQueryFactory qf)
-            {
-                this.luceneSubQueries = luceneSubQueries;
-                this.qf = qf;
-            }
-
-            public void VisitMatchingTerm(Term term)
-            {
-                luceneSubQueries.Add(qf.NewTermQuery(term));
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs b/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
deleted file mode 100644
index 6cddb9c..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
+++ /dev/null
@@ -1,93 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Search.Spans;
-using Lucene.Net.Support;
-using System;
-using System.Collections.Generic;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Factory for <see cref="SpanOrQuery"/>
-    /// </summary>
-    public class SpanNearClauseFactory
-    {
-        public SpanNearClauseFactory(IndexReader reader, string fieldName, BasicQueryFactory qf) {
-            this.reader = reader;
-            this.fieldName = fieldName;
-            this.weightBySpanQuery = new HashMap<SpanQuery, float>();
-            this.qf = qf;
-          }
-
-        private IndexReader reader;
-        private string fieldName;
-        private IDictionary<SpanQuery, float> weightBySpanQuery;
-        private BasicQueryFactory qf;
-
-        public virtual IndexReader IndexReader { get { return reader; } }
-
-        public virtual string FieldName { get { return fieldName; } }
-
-        public virtual BasicQueryFactory BasicQueryFactory { get { return qf; } }
-
-        public virtual int Count { get { return weightBySpanQuery.Count; } }
-
-        public virtual void Clear() { weightBySpanQuery.Clear(); }
-
-        protected virtual void AddSpanQueryWeighted(SpanQuery sq, float weight)
-        {
-            float w;
-            if (weightBySpanQuery.ContainsKey(sq))
-                w = weightBySpanQuery[sq] + weight;
-            else
-                w = weight;
-            weightBySpanQuery[sq] = w;
-        }
-
-        public virtual void AddTermWeighted(Term t, float weight)
-        {
-            SpanTermQuery stq = qf.NewSpanTermQuery(t);
-            /* CHECKME: wrap in Hashable...? */
-            AddSpanQueryWeighted(stq, weight);
-        }
-
-        public virtual void AddSpanQuery(Search.Query q)
-        {
-            if (q == SrndQuery.TheEmptyLcnQuery)
-                return;
-            if (!(q is SpanQuery))
-                throw new InvalidOperationException("Expected SpanQuery: " + q.ToString(FieldName));
-            AddSpanQueryWeighted((SpanQuery)q, q.Boost);
-        }
-
-        public SpanQuery MakeSpanClause()
-        {
-            List<SpanQuery> spanQueries = new List<SpanQuery>();
-            foreach (var wsq in weightBySpanQuery)
-            {
-                wsq.Key.Boost = wsq.Value;
-                spanQueries.Add(wsq.Key);
-            }
-            if (spanQueries.Count == 1)
-                return spanQueries[0];
-            else
-                return new SpanOrQuery(spanQueries.ToArray());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
deleted file mode 100644
index 7a1a8b3..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-\ufeffusing Lucene.Net.Search;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public static class SrndBooleanQuery
-    {
-        public static void AddQueriesToBoolean(
-            BooleanQuery bq,
-            IEnumerable<Search.Query> queries,
-            BooleanClause.Occur occur)
-        {
-            foreach (var query in queries)
-            {
-                bq.Add(query, occur);
-            }
-        }
-
-        public static Search.Query MakeBooleanQuery(
-            IEnumerable<Search.Query> queries,
-            BooleanClause.Occur occur)
-        {
-            if (queries.Count() <= 1)
-            {
-                throw new InvalidOperationException("Too few subqueries: " + queries.Count());
-            }
-            BooleanQuery bq = new BooleanQuery();
-            AddQueriesToBoolean(bq, queries, occur);
-            return bq;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
deleted file mode 100644
index 4044b09..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
+++ /dev/null
@@ -1,108 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Util;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Query that matches String prefixes
-    /// </summary>
-    public class SrndPrefixQuery : SimpleTerm
-    {
-        private readonly BytesRef prefixRef;
-        public SrndPrefixQuery(string prefix, bool quoted, char truncator)
-            : base(quoted)
-        {
-            this.prefix = prefix;
-            prefixRef = new BytesRef(prefix);
-            this.truncator = truncator;
-        }
-
-        private readonly string prefix;
-        public virtual string Prefix { get { return prefix; } }
-
-        private readonly char truncator;
-        public virtual char SuffixOperator { get { return truncator; } }
-
-        public virtual Term GetLucenePrefixTerm(string fieldName)
-        {
-            return new Term(fieldName, Prefix);
-        }
-
-        public override string ToStringUnquoted()
-        {
-            return Prefix;
-        }
-
-        protected override void SuffixToString(StringBuilder r)
-        {
-            r.Append(SuffixOperator);
-        }
-
-        public override void VisitMatchingTerms(IndexReader reader, string fieldName, IMatchingTermVisitor mtv)
-        {
-            /* inspired by PrefixQuery.rewrite(): */
-            Terms terms = MultiFields.GetTerms(reader, fieldName);
-            if (terms != null)
-            {
-                TermsEnum termsEnum = terms.Iterator(null);
-
-                bool skip = false;
-                TermsEnum.SeekStatus status = termsEnum.SeekCeil(new BytesRef(Prefix));
-                if (status == TermsEnum.SeekStatus.FOUND)
-                {
-                    mtv.VisitMatchingTerm(GetLucenePrefixTerm(fieldName));
-                }
-                else if (status == TermsEnum.SeekStatus.NOT_FOUND)
-                {
-                    if (StringHelper.StartsWith(termsEnum.Term(), prefixRef))
-                    {
-                        mtv.VisitMatchingTerm(new Term(fieldName, termsEnum.Term().Utf8ToString()));
-                    }
-                    else
-                    {
-                        skip = true;
-                    }
-                }
-                else
-                {
-                    // EOF
-                    skip = true;
-                }
-
-                if (!skip)
-                {
-                    while (true)
-                    {
-                        BytesRef text = termsEnum.Next();
-                        if (text != null && StringHelper.StartsWith(text, prefixRef))
-                        {
-                            mtv.VisitMatchingTerm(new Term(fieldName, text.Utf8ToString()));
-                        }
-                        else
-                        {
-                            break;
-                        }
-                    }
-                }
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
deleted file mode 100644
index 57b19cc..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
+++ /dev/null
@@ -1,149 +0,0 @@
-\ufeffusing Lucene.Net.Search;
-using Lucene.Net.Support;
-using System;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Lowest level base class for surround queries 
-    /// </summary>
-    public abstract class SrndQuery : ICloneable
-    {
-        //public SrndQuery() { }
-
-        private float weight = (float)1.0;
-        private bool weighted = false;
-
-        public virtual bool IsWeighted { get { return weighted; } }
-
-        public virtual float Weight 
-        { 
-            get { return weight; }
-            set
-            {
-                weight = value; /* as parsed from the query text */
-                weighted = true;
-            }
-        }
-
-        public virtual string WeightString { get { return Number.ToString(Weight); } }
-
-        public virtual string WeightOperator { get { return "^"; } }
-
-
-        protected virtual void WeightToString(StringBuilder r)
-        { 
-            /* append the weight part of a query */
-            if (IsWeighted)
-            {
-                r.Append(WeightOperator);
-                r.Append(WeightString);
-            }
-        }
-
-        public virtual Search.Query MakeLuceneQueryField(string fieldName, BasicQueryFactory qf)
-        {
-            Search.Query q = MakeLuceneQueryFieldNoBoost(fieldName, qf);
-            if (IsWeighted)
-            {
-                q.Boost=(Weight * q.Boost); /* weight may be at any level in a SrndQuery */
-            }
-            return q;
-        }
-
-        public abstract Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf);
-
-        /// <summary>
-        /// This method is used by <see cref="M:GetHashCode()"/> and <see cref="M:Equals(Object)"/>,
-        /// see LUCENE-2945.
-        /// </summary>
-        /// <returns></returns>
-        public abstract override string ToString();
-
-        public virtual bool IsFieldsSubQueryAcceptable { get { return true; } }
-
-        /// <summary> Shallow clone. Subclasses must override this if they
-        /// need to clone any members deeply,
-        /// </summary>
-        public virtual object Clone()
-        {
-            object clone = null;
-            try
-            {
-                clone = base.MemberwiseClone();
-            }
-            catch (Exception e)
-            {
-                throw new SystemException(e.Message, e); // shouldn't happen
-            }
-            return clone;
-        }
-
-        /// <summary>
-        /// For subclasses of <see cref="SrndQuery"/> within the package
-        /// {@link org.apache.lucene.queryparser.surround.query}
-        /// it is not necessary to override this method, <see cref="M:ToString()"/>
-        /// </summary>
-        public override int GetHashCode()
-        {
-            return GetType().GetHashCode() ^ ToString().GetHashCode();
-        }
-
-        /// <summary>
-        /// For subclasses of <see cref="SrndQuery"/> within the package
-        /// {@link org.apache.lucene.queryparser.surround.query}
-        /// it is not necessary to override this method,
-        /// @see #toString()
-        /// </summary>
-        /// <param name="obj"></param>
-        /// <returns></returns>
-        public override bool Equals(object obj)
-        {
-            if (obj == null)
-                return false;
-            if (!GetType().Equals(obj.GetType()))
-                return false;
-            return ToString().Equals(obj.ToString());
-        }
-
-        /// <summary> An empty Lucene query  </summary>
-        public readonly static Search.Query TheEmptyLcnQuery = new EmptyLcnQuery(); /* no changes allowed */ 
-  
-        internal sealed class EmptyLcnQuery : BooleanQuery
-        {
-            public override float Boost
-            {
-                get { return base.Boost; }
-                set { throw new NotSupportedException(); }
-            }
-
-            public override void Add(BooleanClause clause)
-            {
-                throw new NotSupportedException();
-            }
-
-            public override void Add(Search.Query query, BooleanClause.Occur occur)
-            {
-                throw new NotSupportedException();
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
deleted file mode 100644
index 45885a1..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
+++ /dev/null
@@ -1,63 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Util;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Simple single-term clause
-    /// </summary>
-    public class SrndTermQuery : SimpleTerm
-    {
-        public SrndTermQuery(string termText, bool quoted)
-            : base(quoted)
-        {
-            this.termText = termText;
-        }
-
-        private readonly string termText;
-        public virtual string TermText { get { return termText; } }
-
-        public virtual Term GetLuceneTerm(string fieldName)
-        {
-            return new Term(fieldName, TermText);
-        }
-
-        public override string ToStringUnquoted()
-        {
-            return TermText;
-        }
-
-        public override void VisitMatchingTerms(IndexReader reader, string fieldName, IMatchingTermVisitor mtv)
-        {
-            /* check term presence in index here for symmetry with other SimpleTerm's */
-            Terms terms = MultiFields.GetTerms(reader, fieldName);
-            if (terms != null)
-            {
-                TermsEnum termsEnum = terms.Iterator(null);
-
-                TermsEnum.SeekStatus status = termsEnum.SeekCeil(new BytesRef(TermText));
-                if (status == TermsEnum.SeekStatus.FOUND)
-                {
-                    mtv.VisitMatchingTerm(GetLuceneTerm(fieldName));
-                }
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
deleted file mode 100644
index 5ed9ff3..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
+++ /dev/null
@@ -1,139 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Util;
-using System;
-using System.Text;
-using System.Text.RegularExpressions;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public class SrndTruncQuery : SimpleTerm
-    {
-        public SrndTruncQuery(string truncated, char unlimited, char mask)
-            : base(false) /* not quoted */
-        {
-            this.truncated = truncated;
-            this.unlimited = unlimited;
-            this.mask = mask;
-            TruncatedToPrefixAndPattern();
-        }
-
-        private readonly string truncated;
-        private readonly char unlimited;
-        private readonly char mask;
-
-        private string prefix;
-        private BytesRef prefixRef;
-        private Regex pattern;
-
-        public virtual string Truncated { get { return truncated; } }
-
-        public override string ToStringUnquoted()
-        {
-            return Truncated;
-        }
-
-        protected virtual bool MatchingChar(char c)
-        {
-            return (c != unlimited) && (c != mask);
-        }
-
-        protected virtual void AppendRegExpForChar(char c, StringBuilder re)
-        {
-            if (c == unlimited)
-                re.Append(".*");
-            else if (c == mask)
-                re.Append(".");
-            else
-                re.Append(c);
-        }
-
-        protected virtual void TruncatedToPrefixAndPattern()
-        {
-            int i = 0;
-            while ((i < truncated.Length) && MatchingChar(truncated[i]))
-            {
-                i++;
-            }
-            prefix = truncated.Substring(0, i);
-            prefixRef = new BytesRef(prefix);
-
-            StringBuilder re = new StringBuilder();
-            while (i < truncated.Length)
-            {
-                AppendRegExpForChar(truncated[i], re);
-                i++;
-            }
-            pattern = new Regex(re.ToString(), RegexOptions.Compiled);
-        }
-
-        // TODO: Finish implementation
-        public override void VisitMatchingTerms(IndexReader reader, string fieldName, SimpleTerm.IMatchingTermVisitor mtv)
-        {
-            throw new NotImplementedException("Need to translate this from Java's whacky RegEx syntax");
-            //int prefixLength = prefix.Length;
-            //Terms terms = MultiFields.GetTerms(reader, fieldName);
-            //if (terms != null)
-            //{
-            //    MatchCollection matcher = pattern.Matches("");
-            //    try
-            //    {
-            //        TermsEnum termsEnum = terms.Iterator(null);
-
-            //        TermsEnum.SeekStatus status = termsEnum.SeekCeil(prefixRef);
-            //        BytesRef text;
-            //        if (status == TermsEnum.SeekStatus.FOUND)
-            //        {
-            //            text = prefixRef;
-            //        }
-            //        else if (status == TermsEnum.SeekStatus.NOT_FOUND)
-            //        {
-            //            text = termsEnum.Term();
-            //        }
-            //        else
-            //        {
-            //            text = null;
-            //        }
-
-            //        while (text != null)
-            //        {
-            //            if (text != null && StringHelper.StartsWith(text, prefixRef))
-            //            {
-            //                string textString = text.Utf8ToString();
-            //                matcher.Reset(textString.Substring(prefixLength));
-            //                if (matcher.Success)
-            //                {
-            //                    mtv.VisitMatchingTerm(new Term(fieldName, textString));
-            //                }
-            //            }
-            //            else
-            //            {
-            //                break;
-            //            }
-            //            text = termsEnum.Next();
-            //        }
-            //    }
-            //    finally
-            //    {
-            //        matcher.Reset();
-            //    }
-            //}
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs b/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
deleted file mode 100644
index 27f313c..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
+++ /dev/null
@@ -1,30 +0,0 @@
-\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Exception thrown when <see cref="BasicQueryFactory"/> would exceed the limit
-    /// of query clauses.
-    /// </summary>
-    public class TooManyBasicQueries : System.IO.IOException
-    {
-        public TooManyBasicQueries(int maxBasicQueries)
-            : base("Exceeded maximum of " + maxBasicQueries + " basic queries.")
-        { }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs b/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
deleted file mode 100644
index 10756cf..0000000
--- a/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
+++ /dev/null
@@ -1,341 +0,0 @@
-\ufeffusing System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Tokenattributes;
-using Lucene.Net.Documents;
-using Lucene.Net.Index;
-using Lucene.Net.QueryParser.Classic;
-using Lucene.Net.Search;
-using Lucene.Net.Store;
-using Lucene.Net.Util;
-using NUnit.Framework;
-
-namespace Lucene.Net.QueryParser.Analyzing
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class TestAnalyzingQueryParser : LuceneTestCase
-    {
-        private readonly static string FIELD = "field";
-
-        private Analyzer a;
-
-        private string[] wildcardInput;
-        private string[] wildcardExpected;
-        private string[] prefixInput;
-        private string[] prefixExpected;
-        private string[] rangeInput;
-        private string[] rangeExpected;
-        private string[] fuzzyInput;
-        private string[] fuzzyExpected;
-
-        private IDictionary<string, string> wildcardEscapeHits = new Dictionary<string, string>();
-        private IDictionary<string, string> wildcardEscapeMisses = new Dictionary<string, string>();
-
-        public override void SetUp()
-        {
-            base.SetUp();
-            wildcardInput = new string[] { "*bersetzung über*ung",
-                "Mötley Cr\u00fce Mötl?* Crü?", "Renée Zellweger Ren?? Zellw?ger" };
-            wildcardExpected = new string[] { "*bersetzung uber*ung", "motley crue motl?* cru?",
-                "renee zellweger ren?? zellw?ger" };
-
-            prefixInput = new string[] { "übersetzung übersetz*",
-                "Mötley Crüe Mötl* crü*", "René? Zellw*" };
-            prefixExpected = new string[] { "ubersetzung ubersetz*", "motley crue motl* cru*",
-                "rene? zellw*" };
-
-            rangeInput = new string[] { "[aa TO bb]", "{Anaïs TO Zoé}" };
-            rangeExpected = new string[] { "[aa TO bb]", "{anais TO zoe}" };
-
-            fuzzyInput = new string[] { "Übersetzung Übersetzung~0.9",
-                "Mötley Crüe Mötley~0.75 Crüe~0.5",
-                "Renée Zellweger Renée~0.9 Zellweger~" };
-            fuzzyExpected = new string[] { "ubersetzung ubersetzung~1",
-                "motley crue motley~1 crue~2", "renee zellweger renee~0 zellweger~2" };
-
-            wildcardEscapeHits["mö*tley"] = "moatley";
-
-            // need to have at least one genuine wildcard to trigger the wildcard analysis
-            // hence the * before the y
-            wildcardEscapeHits["mö\\*tl*y"] = "mo*tley";
-
-            // escaped backslash then true wildcard
-            wildcardEscapeHits["mö\\\\*tley"] = "mo\\atley";
-
-            // escaped wildcard then true wildcard
-            wildcardEscapeHits["mö\\??ley"] = "mo?tley";
-
-            // the first is an escaped * which should yield a miss
-            wildcardEscapeMisses["mö\\*tl*y"] = "moatley";
-
-            a = new ASCIIAnalyzer();
-        }
-
-        [Test]
-        public void TestSingleChunkExceptions()
-        {
-            bool ex = false;
-            string termStr = "the*tre";
-
-            Analyzer stopsAnalyzer = new MockAnalyzer
-                (Random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET);
-            try
-            {
-                string q = ParseWithAnalyzingQueryParser(termStr, stopsAnalyzer, true);
-            }
-            catch (ParseException e)
-            {
-                if (e.Message.Contains("returned nothing"))
-                {
-                    ex = true;
-                }
-            }
-            assertEquals("Should have returned nothing", true, ex);
-            ex = false;
-
-            AnalyzingQueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, a);
-            try
-            {
-                qp.AnalyzeSingleChunk(FIELD, "", "not a single chunk");
-            }
-            catch (ParseException e)
-            {
-                if (e.Message.Contains("multiple terms"))
-                {
-                    ex = true;
-                }
-            }
-            assertEquals("Should have produced multiple terms", true, ex);
-        }
-
-        [Test]
-        public void TestWildcardAlone()
-        {
-            //seems like crazy edge case, but can be useful in concordance 
-            bool pex = false;
-            try
-            {
-                Query q = GetAnalyzedQuery("*", a, false);
-            }
-            catch (ParseException e)
-            {
-                pex = true;
-            }
-            assertEquals("Wildcard alone with allowWildcard=false", true, pex);
-
-            pex = false;
-            try
-            {
-                String qString = ParseWithAnalyzingQueryParser("*", a, true);
-                assertEquals("Every word", "*", qString);
-            }
-            catch (ParseException e)
-            {
-                pex = true;
-            }
-
-            assertEquals("Wildcard alone with allowWildcard=true", false, pex);
-        }
-
-        [Test]
-        public void TestWildCardEscapes()
-        {
-            foreach (var entry in wildcardEscapeHits)
-            {
-                Query q = GetAnalyzedQuery(entry.Key, a, false);
-                assertEquals("WildcardEscapeHits: " + entry.Key, true, IsAHit(q, entry.Value, a));
-            }
-            foreach (var entry in wildcardEscapeMisses)
-            {
-                Query q = GetAnalyzedQuery(entry.Key, a, false);
-                assertEquals("WildcardEscapeMisses: " + entry.Key, false, IsAHit(q, entry.Value, a));
-            }
-        }
-
-        [Test]
-        public void TestWildCardQueryNoLeadingAllowed()
-        {
-            bool ex = false;
-            try
-            {
-                string q = ParseWithAnalyzingQueryParser(wildcardInput[0], a, false);
-
-            }
-            catch (ParseException e)
-            {
-                ex = true;
-            }
-            assertEquals("Testing initial wildcard not allowed",
-                true, ex);
-        }
-
-        [Test]
-        public void TestWildCardQuery()
-        {
-            for (int i = 0; i < wildcardInput.Length; i++)
-            {
-                assertEquals("Testing wildcards with analyzer " + a.GetType() + ", input string: "
-                    + wildcardInput[i], wildcardExpected[i], ParseWithAnalyzingQueryParser(wildcardInput[i], a, true));
-            }
-        }
-
-        [Test]
-        public void TestPrefixQuery()
-        {
-            for (int i = 0; i < prefixInput.Length; i++)
-            {
-                assertEquals("Testing prefixes with analyzer " + a.GetType() + ", input string: "
-                    + prefixInput[i], prefixExpected[i], ParseWithAnalyzingQueryParser(prefixInput[i], a, false));
-            }
-        }
-
-        [Test]
-        public void TestRangeQuery()
-        {
-            for (int i = 0; i < rangeInput.Length; i++)
-            {
-                assertEquals("Testing ranges with analyzer " + a.GetType() + ", input string: "
-                    + rangeInput[i], rangeExpected[i], ParseWithAnalyzingQueryParser(rangeInput[i], a, false));
-            }
-        }
-
-        [Test]
-        public void TestFuzzyQuery()
-        {
-            for (int i = 0; i < fuzzyInput.Length; i++)
-            {
-                assertEquals("Testing fuzzys with analyzer " + a.GetType() + ", input string: "
-                  + fuzzyInput[i], fuzzyExpected[i], ParseWithAnalyzingQueryParser(fuzzyInput[i], a, false));
-            }
-        }
-
-
-        private string ParseWithAnalyzingQueryParser(string s, Analyzer a, bool allowLeadingWildcard)
-        {
-            Query q = GetAnalyzedQuery(s, a, allowLeadingWildcard);
-            return q.ToString(FIELD);
-        }
-
-        private Query GetAnalyzedQuery(string s, Analyzer a, bool allowLeadingWildcard)
-        {
-            AnalyzingQueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, a);
-            qp.AllowLeadingWildcard = allowLeadingWildcard;
-            Query q = qp.Parse(s);
-            return q;
-        }
-
-        internal sealed class FoldingFilter : TokenFilter
-        {
-            private readonly ICharTermAttribute termAtt;
-
-            public FoldingFilter(TokenStream input)
-                : base(input)
-            {
-                termAtt = AddAttribute<ICharTermAttribute>();
-            }
-
-            public sealed override bool IncrementToken()
-            {
-                if (input.IncrementToken())
-                {
-                    char[] term = termAtt.Buffer();
-                    for (int i = 0; i < term.Length; i++)
-                        switch (term[i])
-                        {
-                            case 'ü':
-                                term[i] = 'u';
-                                break;
-                            case 'ö':
-                                term[i] = 'o';
-                                break;
-                            case 'é':
-                                term[i] = 'e';
-                                break;
-                            case 'ï':
-                                term[i] = 'i';
-                                break;
-                        }
-                    return true;
-                }
-                else
-                {
-                    return false;
-                }
-            }
-        }
-
-        internal sealed class ASCIIAnalyzer : Analyzer
-        {
-
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
-                return new TokenStreamComponents(result, new FoldingFilter(result));
-            }
-        }
-
-        // LUCENE-4176
-        [Test]
-        public void TestByteTerms()
-        {
-            string s = "\u0e40\u0e02";
-            Analyzer analyzer = new MockBytesAnalyzer();
-            Classic.QueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, analyzer);
-            Query q = qp.Parse("[\u0e40\u0e02 TO \u0e40\u0e02]");
-            assertEquals(true, IsAHit(q, s, analyzer));
-        }
-
-        private bool IsAHit(Query q, string content, Analyzer analyzer)
-        {
-            int hits;
-            using (Directory ramDir = NewDirectory())
-            {
-                using (RandomIndexWriter writer = new RandomIndexWriter(Random(), ramDir, analyzer))
-                {
-                    Document doc = new Document();
-                    FieldType fieldType = new FieldType();
-                    fieldType.Indexed = (true);
-                    fieldType.Tokenized = (true);
-                    fieldType.Stored = (true);
-                    Field field = new Field(FIELD, content, fieldType);
-                    doc.Add(field);
-                    writer.AddDocument(doc);
-                }
-                using (DirectoryReader ir = DirectoryReader.Open(ramDir))
-                {
-                    IndexSearcher @is = new IndexSearcher(ir);
-
-                    hits = @is.Search(q, 10).TotalHits;
-                }
-            }
-            if (hits == 1)
-            {
-                return true;
-            }
-            else
-            {
-                return false;
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs b/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
deleted file mode 100644
index 350f181..0000000
--- a/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
+++ /dev/null
@@ -1,278 +0,0 @@
-\ufeffusing System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using NUnit.Framework;
-using Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Tokenattributes;
-using Lucene.Net.Search;
-using Lucene.Net.Util;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    [TestFixture]
-    public class TestMultiAnalyzer_ : BaseTokenStreamTestCase
-    {
-
-        private static int multiToken = 0;
-
-        [Test]
-        public void TestMultiAnalyzer()
-        {
-
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "", new MultiAnalyzer());
-
-            // trivial, no multiple tokens:
-            assertEquals("foo", qp.Parse("foo").toString());
-            assertEquals("foo", qp.Parse("\"foo\"").toString());
-            assertEquals("foo foobar", qp.Parse("foo foobar").toString());
-            assertEquals("\"foo foobar\"", qp.Parse("\"foo foobar\"").toString());
-            assertEquals("\"foo foobar blah\"", qp.Parse("\"foo foobar blah\"").toString());
-
-            // two tokens at the same position:
-            assertEquals("(multi multi2) foo", qp.Parse("multi foo").toString());
-            assertEquals("foo (multi multi2)", qp.Parse("foo multi").toString());
-            assertEquals("(multi multi2) (multi multi2)", qp.Parse("multi multi").toString());
-            assertEquals("+(foo (multi multi2)) +(bar (multi multi2))",
-                qp.Parse("+(foo multi) +(bar multi)").toString());
-            assertEquals("+(foo (multi multi2)) field:\"bar (multi multi2)\"",
-                qp.Parse("+(foo multi) field:\"bar multi\"").toString());
-
-            // phrases:
-            assertEquals("\"(multi multi2) foo\"", qp.Parse("\"multi foo\"").toString());
-            assertEquals("\"foo (multi multi2)\"", qp.Parse("\"foo multi\"").toString());
-            assertEquals("\"foo (multi multi2) foobar (multi multi2)\"",
-                qp.Parse("\"foo multi foobar multi\"").toString());
-
-            // fields:
-            assertEquals("(field:multi field:multi2) field:foo", qp.Parse("field:multi field:foo").toString());
-            assertEquals("field:\"(multi multi2) foo\"", qp.Parse("field:\"multi foo\"").toString());
-
-            // three tokens at one position:
-            assertEquals("triplemulti multi3 multi2", qp.Parse("triplemulti").toString());
-            assertEquals("foo (triplemulti multi3 multi2) foobar",
-                qp.Parse("foo triplemulti foobar").toString());
-
-            // phrase with non-default slop:
-            assertEquals("\"(multi multi2) foo\"~10", qp.Parse("\"multi foo\"~10").toString());
-
-            // phrase with non-default boost:
-            assertEquals("\"(multi multi2) foo\"^2.0", qp.Parse("\"multi foo\"^2").toString());
-
-            // phrase after changing default slop
-            qp.PhraseSlop=(99);
-            assertEquals("\"(multi multi2) foo\"~99 bar",
-                         qp.Parse("\"multi foo\" bar").toString());
-            assertEquals("\"(multi multi2) foo\"~99 \"foo bar\"~2",
-                         qp.Parse("\"multi foo\" \"foo bar\"~2").toString());
-            qp.PhraseSlop=(0);
-
-            // non-default operator:
-            qp.DefaultOperator=(QueryParserBase.AND_OPERATOR);
-            assertEquals("+(multi multi2) +foo", qp.Parse("multi foo").toString());
-
-        }
-
-        [Test]
-        public void TestMultiAnalyzerWithSubclassOfQueryParser()
-        {
-
-            DumbQueryParser qp = new DumbQueryParser("", new MultiAnalyzer());
-            qp.PhraseSlop = (99); // modified default slop
-
-            // direct call to (super's) getFieldQuery to demonstrate differnce
-            // between phrase and multiphrase with modified default slop
-            assertEquals("\"foo bar\"~99",
-                         qp.GetSuperFieldQuery("", "foo bar", true).toString());
-            assertEquals("\"(multi multi2) bar\"~99",
-                         qp.GetSuperFieldQuery("", "multi bar", true).toString());
-
-
-            // ask sublcass to parse phrase with modified default slop
-            assertEquals("\"(multi multi2) foo\"~99 bar",
-                         qp.Parse("\"multi foo\" bar").toString());
-
-        }
-
-        [Test]
-        public void TestPosIncrementAnalyzer()
-        {
-            QueryParser qp = new QueryParser(LuceneVersion.LUCENE_40, "", new PosIncrementAnalyzer());
-            assertEquals("quick brown", qp.Parse("the quick brown").toString());
-            assertEquals("quick brown fox", qp.Parse("the quick brown fox").toString());
-        }
-
-        /// <summary>
-        /// Expands "multi" to "multi" and "multi2", both at the same position,
-        /// and expands "triplemulti" to "triplemulti", "multi3", and "multi2".  
-        /// </summary>
-        private class MultiAnalyzer : Analyzer
-        {
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
-                return new TokenStreamComponents(result, new TestFilter(result));
-            }
-        }
-
-        private sealed class TestFilter : TokenFilter
-        {
-
-            private string prevType;
-            private int prevStartOffset;
-            private int prevEndOffset;
-
-            private readonly ICharTermAttribute termAtt;
-            private readonly IPositionIncrementAttribute posIncrAtt;
-            private readonly IOffsetAttribute offsetAtt;
-            private readonly ITypeAttribute typeAtt;
-
-            public TestFilter(TokenStream @in)
-                : base(@in)
-            {
-                termAtt = AddAttribute<ICharTermAttribute>();
-                posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
-                offsetAtt = AddAttribute<IOffsetAttribute>();
-                typeAtt = AddAttribute<ITypeAttribute>();
-            }
-
-            public override sealed bool IncrementToken()
-            {
-                if (multiToken > 0)
-                {
-                    termAtt.SetEmpty().Append("multi" + (multiToken + 1));
-                    offsetAtt.SetOffset(prevStartOffset, prevEndOffset);
-                    typeAtt.Type = (prevType);
-                    posIncrAtt.PositionIncrement = (0);
-                    multiToken--;
-                    return true;
-                }
-                else
-                {
-                    bool next = input.IncrementToken();
-                    if (!next)
-                    {
-                        return false;
-                    }
-                    prevType = typeAtt.Type;
-                    prevStartOffset = offsetAtt.StartOffset();
-                    prevEndOffset = offsetAtt.EndOffset();
-                    string text = termAtt.toString();
-                    if (text.equals("triplemulti"))
-                    {
-                        multiToken = 2;
-                        return true;
-                    }
-                    else if (text.equals("multi"))
-                    {
-                        multiToken = 1;
-                        return true;
-                    }
-                    else
-                    {
-                        return true;
-                    }
-                }
-            }
-
-            public override void Reset()
-            {
-                base.Reset();
-                this.prevType = null;
-                this.prevStartOffset = 0;
-                this.prevEndOffset = 0;
-            }
-        }
-
-        /// <summary>
-        /// Analyzes "the quick brown" as: quick(incr=2) brown(incr=1).
-        /// Does not work correctly for input other than "the quick brown ...".
-        /// </summary>
-        private class PosIncrementAnalyzer : Analyzer
-        {
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
-                return new TokenStreamComponents(result, new TestPosIncrementFilter(result));
-            }
-        }
-
-        private sealed class TestPosIncrementFilter : TokenFilter
-        {
-            ICharTermAttribute termAtt;
-            IPositionIncrementAttribute posIncrAtt;
-
-            public TestPosIncrementFilter(TokenStream @in)
-                : base(@in)
-            {
-                termAtt = AddAttribute<ICharTermAttribute>();
-                posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
-            }
-
-            public override sealed bool IncrementToken()
-            {
-                while (input.IncrementToken())
-                {
-                    if (termAtt.toString().equals("the"))
-                    {
-                        // stopword, do nothing
-                    }
-                    else if (termAtt.toString().equals("quick"))
-                    {
-                        posIncrAtt.PositionIncrement = (2);
-                        return true;
-                    }
-                    else
-                    {
-                        posIncrAtt.PositionIncrement = (1);
-                        return true;
-                    }
-                }
-                return false;
-            }
-        }
-
-        /// <summary>
-        /// a very simple subclass of QueryParser
-        /// </summary>
-        private sealed class DumbQueryParser : QueryParser
-        {
-            public DumbQueryParser(string f, Analyzer a)
-                : base(TEST_VERSION_CURRENT, f, a)
-            {
-            }
-
-            // expose super's version 
-            public Query GetSuperFieldQuery(string f, string t, bool quoted)
-            {
-                return base.GetFieldQuery(f, t, quoted);
-            }
-
-            // wrap super's version
-            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
-            {
-                return new DumbQueryWrapper(GetSuperFieldQuery(field, queryText, quoted));
-            }
-        }
-
-        /// <summary>
-        /// A very simple wrapper to prevent instanceof checks but uses
-        /// the toString of the query it wraps.
-        /// </summary>
-        private sealed class DumbQueryWrapper : Query
-        {
-            private Query q;
-            public DumbQueryWrapper(Query q)
-            {
-                this.q = q;
-            }
-
-            public override string ToString(string field)
-            {
-                return q.ToString(field);
-            }
-        }
-
-    }
-}


[22/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
new file mode 100644
index 0000000..d421ad6
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
@@ -0,0 +1,144 @@
+\ufeffusing System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Base class for composite queries (such as AND/OR/NOT)
+    /// </summary>
+    public abstract class ComposedQuery : SrndQuery
+    {
+        public ComposedQuery(IEnumerable<SrndQuery> qs, bool operatorInfix, string opName)
+        {
+            Recompose(qs);
+            this.operatorInfix = operatorInfix;
+            this.opName = opName;
+        }
+
+        protected virtual void Recompose(IEnumerable<SrndQuery> queries)
+        {
+            if (queries.Count() < 2) throw new InvalidOperationException("Too few subqueries");
+            this.queries = new List<SrndQuery>(queries);
+        }
+
+        protected string opName;
+        public virtual string OperatorName { get { return opName; } }
+
+        protected IList<SrndQuery> queries;
+
+        public virtual IEnumerator<SrndQuery> GetSubQueriesEnumerator()
+        {
+            return queries.GetEnumerator();
+        }
+
+        public virtual int NrSubQueries { get { return queries.Count; } }
+
+        public virtual SrndQuery GetSubQuery(int qn) { return queries[qn]; }
+
+        private bool operatorInfix;
+        public virtual bool IsOperatorInfix { get { return operatorInfix; } } /* else prefix operator */
+
+        public IEnumerable<Search.Query> MakeLuceneSubQueriesField(string fn, BasicQueryFactory qf)
+        {
+            List<Search.Query> luceneSubQueries = new List<Search.Query>();
+            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
+            while (sqi.MoveNext())
+            {
+                luceneSubQueries.Add((sqi.Current).MakeLuceneQueryField(fn, qf));
+            }
+            return luceneSubQueries;
+        }
+
+        public override string ToString()
+        {
+            StringBuilder r = new StringBuilder();
+            if (IsOperatorInfix)
+            {
+                InfixToString(r);
+            }
+            else
+            {
+                PrefixToString(r);
+            }
+            WeightToString(r);
+            return r.ToString();
+        }
+
+        // Override for different spacing
+        protected virtual string PrefixSeparator { get { return ", "; } }
+        protected virtual string BracketOpen { get { return "("; } }
+        protected virtual string BracketClose { get { return ")"; } }
+
+        protected virtual void InfixToString(StringBuilder r)
+        {
+            /* Brackets are possibly redundant in the result. */
+            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
+            r.Append(BracketOpen);
+            if (sqi.MoveNext())
+            {
+                r.Append(sqi.Current.ToString());
+                while (sqi.MoveNext())
+                {
+                    r.Append(" ");
+                    r.Append(OperatorName); /* infix operator */
+                    r.Append(" ");
+                    r.Append(sqi.Current.ToString());
+                }
+            }
+            r.Append(BracketClose);
+        }
+
+        protected virtual void PrefixToString(StringBuilder r)
+        {
+            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
+            r.Append(OperatorName); /* prefix operator */
+            r.Append(BracketOpen);
+            if (sqi.MoveNext())
+            {
+                r.Append(sqi.Current.ToString());
+                while (sqi.MoveNext())
+                {
+                    r.Append(PrefixSeparator);
+                    r.Append(sqi.Current.ToString());
+                }
+            }
+            r.Append(BracketClose);
+        }
+
+        public override bool IsFieldsSubQueryAcceptable
+        {
+            get
+            {
+                /* at least one subquery should be acceptable */
+                IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
+                while (sqi.MoveNext())
+                {
+                    if ((sqi.Current).IsFieldsSubQueryAcceptable)
+                    {
+                        return true;
+                    }
+                }
+                return false;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
new file mode 100644
index 0000000..1ca7a01
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
@@ -0,0 +1,117 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search.Spans;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory for NEAR queries 
+    /// </summary>
+    public class DistanceQuery : ComposedQuery, IDistanceSubQuery
+    {
+        public DistanceQuery(
+            IEnumerable<SrndQuery> queries,
+            bool infix,
+            int opDistance,
+            string opName,
+            bool ordered)
+            : base(queries, infix, opName)
+        {
+            this.opDistance = opDistance; /* the distance indicated in the operator */
+            this.ordered = ordered;
+        }
+
+        private int opDistance;
+        public virtual int OpDistance { get { return opDistance; } }
+
+        private bool ordered;
+        public virtual bool QueriesOrdered { get { return ordered; } }
+
+
+        public virtual string DistanceSubQueryNotAllowed()
+        {
+            var sqi = GetSubQueriesEnumerator();
+            while (sqi.MoveNext())
+            {
+                var dsq = sqi.Current as IDistanceSubQuery;
+                if (dsq != null)
+                {
+                    string m = dsq.DistanceSubQueryNotAllowed();
+                    if (m != null)
+                    {
+                        return m;
+                    }
+                }
+                else
+                {
+                    return "Operator " + OperatorName + " does not allow subquery " + dsq.ToString();
+                }
+            }
+            return null; /* subqueries acceptable */
+        }
+
+        public virtual void AddSpanQueries(SpanNearClauseFactory sncf)
+        {
+            Search.Query snq = GetSpanNearQuery(sncf.IndexReader,
+                                  sncf.FieldName,
+                                  Weight,
+                                  sncf.BasicQueryFactory);
+            sncf.AddSpanQuery(snq);
+        }
+
+        public Search.Query GetSpanNearQuery(
+            IndexReader reader,
+            String fieldName,
+            float boost,
+            BasicQueryFactory qf)
+        {
+            SpanQuery[] spanClauses = new SpanQuery[NrSubQueries];
+            var sqi = GetSubQueriesEnumerator();
+            int qi = 0;
+            while (sqi.MoveNext())
+            {
+                SpanNearClauseFactory sncf = new SpanNearClauseFactory(reader, fieldName, qf);
+
+                ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
+                if (sncf.Count == 0)
+                { /* distance operator requires all sub queries */
+                    while (sqi.MoveNext())
+                    { /* produce evt. error messages but ignore results */
+                        ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
+                        sncf.Clear();
+                    }
+                    return SrndQuery.TheEmptyLcnQuery;
+                }
+
+                spanClauses[qi] = sncf.MakeSpanClause();
+                qi++;
+            }
+            SpanNearQuery r = new SpanNearQuery(spanClauses, OpDistance - 1, QueriesOrdered);
+            r.Boost = boost;
+            return r;
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            return new DistanceRewriteQuery(this, fieldName, qf);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
new file mode 100644
index 0000000..3d3a108
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
@@ -0,0 +1,35 @@
+\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    internal class DistanceRewriteQuery : RewriteQuery<DistanceQuery>
+    {
+        public DistanceRewriteQuery(
+            DistanceQuery srndQuery,
+            string fieldName,
+            BasicQueryFactory qf)
+            : base(srndQuery, fieldName, qf)
+        {
+        }
+
+        public override Search.Query Rewrite(Index.IndexReader reader)
+        {
+            return srndQuery.GetSpanNearQuery(reader, fieldName, Boost, qf);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
new file mode 100644
index 0000000..639f9e0
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
@@ -0,0 +1,36 @@
+\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Interface for queries that can be nested as subqueries
+    /// into a span near.
+    /// </summary>
+    public interface IDistanceSubQuery
+    {
+        /// <summary>
+        /// When distanceSubQueryNotAllowed() returns non null, the reason why the subquery
+        /// is not allowed as a distance subquery is returned.
+        /// <br>When distanceSubQueryNotAllowed() returns null addSpanNearQueries() can be used
+        /// in the creation of the span near clause for the subquery.
+        /// </summary>
+        string DistanceSubQueryNotAllowed();
+
+        void AddSpanQueries(SpanNearClauseFactory sncf);
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
new file mode 100644
index 0000000..912bf36
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
@@ -0,0 +1,105 @@
+\ufeffusing System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Forms an OR query of the provided query across multiple fields.
+    /// </summary>
+    public class FieldsQuery : SrndQuery /* mostly untested */
+    {
+        private SrndQuery q;
+        private IEnumerable<string> fieldNames;
+        private readonly char fieldOp;
+        private readonly string OrOperatorName = "OR"; /* for expanded queries, not normally visible */
+
+        public FieldsQuery(SrndQuery q, IEnumerable<string> fieldNames, char fieldOp)
+        {
+            this.q = q;
+            this.fieldNames = new List<string>(fieldNames);
+            this.fieldOp = fieldOp;
+        }
+
+        public FieldsQuery(SrndQuery q, string fieldName, char fieldOp)
+        {
+            this.q = q;
+            var fieldNameList = new List<string>();
+            fieldNameList.Add(fieldName);
+            this.fieldNames = fieldNameList;
+            this.fieldOp = fieldOp;
+        }
+
+        public override bool IsFieldsSubQueryAcceptable
+        {
+            get { return false; }
+        }
+
+        public Search.Query MakeLuceneQueryNoBoost(BasicQueryFactory qf)
+        {
+            if (fieldNames.Count() == 1)
+            { /* single field name: no new queries needed */
+                return q.MakeLuceneQueryFieldNoBoost(fieldNames.FirstOrDefault(), qf);
+            }
+            else
+            { /* OR query over the fields */
+                List<SrndQuery> queries = new List<SrndQuery>();
+                foreach (var fieldName in fieldNames)
+                {
+                    var qc = (SrndQuery)q.Clone();
+                    queries.Add(new FieldsQuery(qc, fieldName, fieldOp));
+                }
+                OrQuery oq = new OrQuery(queries,
+                                        true /* infix OR for field names */,
+                                        OrOperatorName);
+                // System.out.println(getClass().toString() + ", fields expanded: " + oq.toString()); /* needs testing */
+                return oq.MakeLuceneQueryField(null, qf);
+            }
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            return MakeLuceneQueryNoBoost(qf); /* use this.fieldNames instead of fieldName */
+        }
+
+        public virtual IEnumerable<string> FieldNames { get { return fieldNames; } }
+
+        public virtual char FieldOperator { get { return fieldOp; } }
+
+        public override string ToString()
+        {
+            StringBuilder r = new StringBuilder();
+            r.Append("(");
+            FieldNamesToString(r);
+            r.Append(q.ToString());
+            r.Append(")");
+            return r.ToString();
+        }
+
+        protected virtual void FieldNamesToString(StringBuilder r)
+        {
+            foreach (var fieldName in FieldNames)
+            {
+                r.Append(fieldName);
+                r.Append(FieldOperator);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
new file mode 100644
index 0000000..30d40a8
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
@@ -0,0 +1,48 @@
+\ufeffusing Lucene.Net.Search;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory for prohibited clauses
+    /// </summary>
+    public class NotQuery : ComposedQuery
+    {
+        public NotQuery(IEnumerable<SrndQuery> queries, string opName)
+            : base(queries, true /* infix */, opName)
+        {
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            var luceneSubQueries = MakeLuceneSubQueriesField(fieldName, qf);
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(luceneSubQueries.FirstOrDefault(), BooleanClause.Occur.MUST);
+            SrndBooleanQuery.AddQueriesToBoolean(bq,
+                // FIXME: do not allow weights on prohibited subqueries.
+                    //luceneSubQueries.subList(1, luceneSubQueries.size()),
+                    luceneSubQueries.Skip(1).ToList(),
+                // later subqueries: not required, prohibited
+                    BooleanClause.Occur.MUST_NOT);
+            return bq;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
new file mode 100644
index 0000000..f7d0036
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
@@ -0,0 +1,71 @@
+\ufeffusing Lucene.Net.Search;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory for disjunctions (OR queries).
+    /// </summary>
+    public class OrQuery : ComposedQuery, IDistanceSubQuery
+    {
+        public OrQuery(IEnumerable<SrndQuery> queries, bool infix, string opName)
+            : base(queries, infix, opName)
+        {
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            // Subqueries can be individually boosted.
+            return SrndBooleanQuery.MakeBooleanQuery(
+                MakeLuceneSubQueriesField(fieldName, qf), BooleanClause.Occur.SHOULD);
+        }
+
+        /// <summary>
+        /// Returns an error message when some subquery cannot take part in a
+        /// distance query; returns null when all subqueries are acceptable.
+        /// </summary>
+        public virtual string DistanceSubQueryNotAllowed()
+        {
+            for (var sqi = GetSubQueriesEnumerator(); sqi.MoveNext(); )
+            {
+                var distanceSubQuery = sqi.Current as IDistanceSubQuery;
+                if (distanceSubQuery == null)
+                {
+                    return "subquery not allowed: " + sqi.Current.ToString();
+                }
+                string message = distanceSubQuery.DistanceSubQueryNotAllowed();
+                if (message != null)
+                {
+                    return message;
+                }
+            }
+            return null;
+        }
+
+        public virtual void AddSpanQueries(SpanNearClauseFactory sncf)
+        {
+            for (var sqi = GetSubQueriesEnumerator(); sqi.MoveNext(); )
+            {
+                ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
new file mode 100644
index 0000000..030923f
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
@@ -0,0 +1,85 @@
+\ufeffusing Lucene.Net.Index;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Base class for queries that are rewritten to a concrete Lucene query
+    /// only when <see cref="Rewrite"/> is called with an <see cref="IndexReader"/>.
+    /// </summary>
+    public abstract class RewriteQuery<SQ> : Search.Query
+    {
+        protected readonly SQ srndQuery;
+        protected readonly string fieldName;
+        protected readonly BasicQueryFactory qf;
+
+        public RewriteQuery(
+            SQ srndQuery,
+            string fieldName,
+            BasicQueryFactory qf)
+        {
+            this.srndQuery = srndQuery;
+            this.fieldName = fieldName;
+            this.qf = qf;
+        }
+
+        public abstract override Search.Query Rewrite(IndexReader reader);
+
+        public override string ToString()
+        {
+            return ToString(null);
+        }
+
+        public override string ToString(string field)
+        {
+            // The field argument is not used by surround queries; it is echoed
+            // for diagnostics only.
+            return string.Format(
+                "{0}{1}({2}, {3}, {4})",
+                GetType().Name,
+                field == null ? "" : "(unused: " + field + ")",
+                fieldName,
+                srndQuery.ToString(),
+                qf.ToString());
+        }
+
+        public override int GetHashCode()
+        {
+            return GetType().GetHashCode()
+                ^ fieldName.GetHashCode()
+                ^ qf.GetHashCode()
+                ^ srndQuery.GetHashCode();
+        }
+
+        public override bool Equals(object obj)
+        {
+            if (obj == null || !GetType().Equals(obj.GetType()))
+                return false;
+            var that = (RewriteQuery<SQ>)obj;
+            return fieldName.Equals(that.fieldName)
+                && qf.Equals(that.qf)
+                && srndQuery.Equals(that.srndQuery);
+        }
+
+        /// <summary>
+        /// Not supported by this query.
+        /// </summary>
+        /// <exception cref="NotSupportedException">always thrown: clone is not supported.</exception>
+        public override object Clone()
+        {
+            throw new NotSupportedException();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
new file mode 100644
index 0000000..5e39e03
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
@@ -0,0 +1,118 @@
+\ufeffusing Lucene.Net.Index;
+using System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Base class for queries that expand to sets of simple terms.
+    /// </summary>
+    public abstract class SimpleTerm : SrndQuery, IDistanceSubQuery, IComparable<SimpleTerm>
+    {
+        public SimpleTerm(bool q)
+        {
+            quoted = q;
+        }
+
+        private bool quoted;
+        // True when the term was written between quote characters in the query text.
+        internal bool IsQuoted { get { return quoted; } }
+
+        public virtual string Quote { get { return "\""; } }
+        public virtual string FieldOperator { get { return "/"; } }
+
+        /// <summary>
+        /// Returns the term text without quotes, suffix operator or weight.
+        /// </summary>
+        public abstract string ToStringUnquoted();
+
+        [Obsolete("deprecated (March 2011) Not normally used, to be removed from Lucene 4.0. This class implementing Comparable is to be removed at the same time.")]
+        public int CompareTo(SimpleTerm ost)
+        {
+            /* for ordering terms and prefixes before using an index, not used */
+            // Use an ordinal comparison to match the Java original (String.compareTo);
+            // the culture-sensitive string.CompareTo could order terms differently
+            // depending on the current culture.
+            return string.CompareOrdinal(this.ToStringUnquoted(), ost.ToStringUnquoted());
+        }
+
+        protected virtual void SuffixToString(StringBuilder r) { } /* override for prefix query */
+
+        /// <summary>
+        /// Renders the (optionally quoted) term text followed by the suffix
+        /// operator and the weight, when present.
+        /// </summary>
+        public override string ToString()
+        {
+            StringBuilder r = new StringBuilder();
+            if (IsQuoted)
+            {
+                r.Append(Quote);
+            }
+            r.Append(ToStringUnquoted());
+            if (IsQuoted)
+            {
+                r.Append(Quote);
+            }
+            SuffixToString(r);
+            WeightToString(r);
+            return r.ToString();
+        }
+
+        /// <summary>
+        /// Visits each indexed term in <paramref name="fieldName"/> of
+        /// <paramref name="reader"/> that this query matches.
+        /// </summary>
+        public abstract void VisitMatchingTerms(
+                            IndexReader reader,
+                            string fieldName,
+                            IMatchingTermVisitor mtv);
+
+        /// <summary>
+        /// Callback to visit each matching term during "rewrite"
+        /// in <see cref="M:VisitMatchingTerms(IndexReader, string, IMatchingTermVisitor)"/>
+        /// </summary>
+        public interface IMatchingTermVisitor
+        {
+            void VisitMatchingTerm(Term t);
+        }
+
+        // A single term is always allowed inside a distance query.
+        public string DistanceSubQueryNotAllowed()
+        {
+            return null;
+        }
+
+        public void AddSpanQueries(SpanNearClauseFactory sncf)
+        {
+            VisitMatchingTerms(
+                sncf.IndexReader,
+                sncf.FieldName,
+                new AddSpanQueriesMatchingTermVisitor(sncf, Weight));
+        }
+
+        // Adds each visited term to the SpanNearClauseFactory with this query's weight.
+        internal class AddSpanQueriesMatchingTermVisitor : IMatchingTermVisitor
+        {
+            private readonly SpanNearClauseFactory sncf;
+            private readonly float weight;
+
+            public AddSpanQueriesMatchingTermVisitor(SpanNearClauseFactory sncf, float weight)
+            {
+                this.sncf = sncf;
+                this.weight = weight;
+            }
+
+            public void VisitMatchingTerm(Term term)
+            {
+                sncf.AddTermWeighted(term, weight);
+            }
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            return new SimpleTermRewriteQuery(this, fieldName, qf);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
new file mode 100644
index 0000000..6502d6c
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
@@ -0,0 +1,64 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Rewrites a <see cref="SimpleTerm"/> to the disjunction of the terms
+    /// that actually occur in the index.
+    /// </summary>
+    internal class SimpleTermRewriteQuery : RewriteQuery<SimpleTerm>
+    {
+        public SimpleTermRewriteQuery(
+            SimpleTerm srndQuery,
+            string fieldName,
+            BasicQueryFactory qf)
+            : base(srndQuery, fieldName, qf)
+        {
+        }
+
+        public override Search.Query Rewrite(IndexReader reader)
+        {
+            var luceneSubQueries = new List<Search.Query>();
+            srndQuery.VisitMatchingTerms(reader, fieldName,
+                new SimpleTermRewriteMatchingTermVisitor(luceneSubQueries, qf));
+            switch (luceneSubQueries.Count)
+            {
+                case 0:
+                    return SrndQuery.TheEmptyLcnQuery;
+                case 1:
+                    return luceneSubQueries[0];
+                default:
+                    // The collected subqueries all have default weight;
+                    // OR the subquery terms together.
+                    return SrndBooleanQuery.MakeBooleanQuery(
+                        luceneSubQueries, BooleanClause.Occur.SHOULD);
+            }
+        }
+
+        // Collects a term query for each matching term visited during rewrite.
+        internal class SimpleTermRewriteMatchingTermVisitor : SimpleTerm.IMatchingTermVisitor
+        {
+            private readonly IList<Search.Query> luceneSubQueries;
+            private readonly BasicQueryFactory qf;
+
+            public SimpleTermRewriteMatchingTermVisitor(IList<Search.Query> luceneSubQueries, BasicQueryFactory qf)
+            {
+                this.luceneSubQueries = luceneSubQueries;
+                this.qf = qf;
+            }
+
+            public void VisitMatchingTerm(Term term)
+            {
+                luceneSubQueries.Add(qf.NewTermQuery(term));
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
new file mode 100644
index 0000000..6cddb9c
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
@@ -0,0 +1,93 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search.Spans;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory that accumulates weighted <see cref="SpanQuery"/> clauses and
+    /// combines them into a single clause, using a <see cref="SpanOrQuery"/>
+    /// when more than one distinct clause was added.
+    /// </summary>
+    public class SpanNearClauseFactory
+    {
+        public SpanNearClauseFactory(IndexReader reader, string fieldName, BasicQueryFactory qf)
+        {
+            this.reader = reader;
+            this.fieldName = fieldName;
+            this.weightBySpanQuery = new HashMap<SpanQuery, float>();
+            this.qf = qf;
+        }
+
+        private readonly IndexReader reader;
+        private readonly string fieldName;
+        private readonly IDictionary<SpanQuery, float> weightBySpanQuery;
+        private readonly BasicQueryFactory qf;
+
+        public virtual IndexReader IndexReader { get { return reader; } }
+
+        public virtual string FieldName { get { return fieldName; } }
+
+        public virtual BasicQueryFactory BasicQueryFactory { get { return qf; } }
+
+        /// <summary>Number of distinct span queries added so far.</summary>
+        public virtual int Count { get { return weightBySpanQuery.Count; } }
+
+        public virtual void Clear() { weightBySpanQuery.Clear(); }
+
+        /// <summary>
+        /// Adds <paramref name="sq"/> with the given weight; when the same span
+        /// query was added before, the weights are accumulated.
+        /// </summary>
+        protected virtual void AddSpanQueryWeighted(SpanQuery sq, float weight)
+        {
+            // Single TryGetValue lookup instead of ContainsKey followed by the
+            // indexer (which performs the lookup twice).
+            float w;
+            if (weightBySpanQuery.TryGetValue(sq, out w))
+                weightBySpanQuery[sq] = w + weight;
+            else
+                weightBySpanQuery[sq] = weight;
+        }
+
+        public virtual void AddTermWeighted(Term t, float weight)
+        {
+            SpanTermQuery stq = qf.NewSpanTermQuery(t);
+            /* CHECKME: wrap in Hashable...? */
+            AddSpanQueryWeighted(stq, weight);
+        }
+
+        /// <summary>
+        /// Adds <paramref name="q"/>, which must be a <see cref="SpanQuery"/>,
+        /// weighted by its boost. The empty query is ignored.
+        /// </summary>
+        /// <exception cref="InvalidOperationException">when <paramref name="q"/> is not a <see cref="SpanQuery"/>.</exception>
+        public virtual void AddSpanQuery(Search.Query q)
+        {
+            if (q == SrndQuery.TheEmptyLcnQuery)
+                return;
+            if (!(q is SpanQuery))
+                throw new InvalidOperationException("Expected SpanQuery: " + q.ToString(FieldName));
+            AddSpanQueryWeighted((SpanQuery)q, q.Boost);
+        }
+
+        /// <summary>
+        /// Builds the combined span clause: the single added query, or a
+        /// <see cref="SpanOrQuery"/> over all added queries, each boosted by
+        /// its accumulated weight.
+        /// </summary>
+        public SpanQuery MakeSpanClause()
+        {
+            List<SpanQuery> spanQueries = new List<SpanQuery>();
+            foreach (var wsq in weightBySpanQuery)
+            {
+                wsq.Key.Boost = wsq.Value;
+                spanQueries.Add(wsq.Key);
+            }
+            if (spanQueries.Count == 1)
+                return spanQueries[0];
+            else
+                return new SpanOrQuery(spanQueries.ToArray());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
new file mode 100644
index 0000000..7a1a8b3
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
@@ -0,0 +1,51 @@
+\ufeffusing Lucene.Net.Search;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Helpers for combining surround subqueries into a <see cref="BooleanQuery"/>.
+    /// </summary>
+    public static class SrndBooleanQuery
+    {
+        /// <summary>
+        /// Adds each of <paramref name="queries"/> to <paramref name="bq"/>
+        /// with the given <paramref name="occur"/>.
+        /// </summary>
+        public static void AddQueriesToBoolean(
+            BooleanQuery bq,
+            IEnumerable<Search.Query> queries,
+            BooleanClause.Occur occur)
+        {
+            foreach (var query in queries)
+            {
+                bq.Add(query, occur);
+            }
+        }
+
+        /// <summary>
+        /// Combines two or more subqueries into a single <see cref="BooleanQuery"/>.
+        /// </summary>
+        /// <exception cref="InvalidOperationException">when fewer than two subqueries are given.</exception>
+        public static Search.Query MakeBooleanQuery(
+            IEnumerable<Search.Query> queries,
+            BooleanClause.Occur occur)
+        {
+            // Materialize at most once: the original enumerated the sequence up
+            // to three times (two Count() calls plus the final enumeration),
+            // which is wasteful and unsafe for one-shot enumerables.
+            IList<Search.Query> queryList = queries as IList<Search.Query> ?? queries.ToList();
+            if (queryList.Count <= 1)
+            {
+                throw new InvalidOperationException("Too few subqueries: " + queryList.Count);
+            }
+            BooleanQuery bq = new BooleanQuery();
+            AddQueriesToBoolean(bq, queryList, occur);
+            return bq;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
new file mode 100644
index 0000000..4044b09
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
@@ -0,0 +1,108 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Util;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Query that matches String prefixes
+    /// </summary>
+    public class SrndPrefixQuery : SimpleTerm
+    {
+        // UTF-8 form of the prefix, precomputed for term comparisons.
+        private readonly BytesRef prefixRef;
+        public SrndPrefixQuery(string prefix, bool quoted, char truncator)
+            : base(quoted)
+        {
+            this.prefix = prefix;
+            prefixRef = new BytesRef(prefix);
+            this.truncator = truncator;
+        }
+
+        private readonly string prefix;
+        public virtual string Prefix { get { return prefix; } }
+
+        // The truncation character that followed the prefix in the query text.
+        private readonly char truncator;
+        public virtual char SuffixOperator { get { return truncator; } }
+
+        public virtual Term GetLucenePrefixTerm(string fieldName)
+        {
+            return new Term(fieldName, Prefix);
+        }
+
+        public override string ToStringUnquoted()
+        {
+            return Prefix;
+        }
+
+        protected override void SuffixToString(StringBuilder r)
+        {
+            r.Append(SuffixOperator);
+        }
+
+        /// <summary>
+        /// Visits every indexed term in <paramref name="fieldName"/> that
+        /// starts with the prefix.
+        /// </summary>
+        public override void VisitMatchingTerms(IndexReader reader, string fieldName, IMatchingTermVisitor mtv)
+        {
+            /* inspired by PrefixQuery.rewrite(): */
+            Terms terms = MultiFields.GetTerms(reader, fieldName);
+            if (terms != null)
+            {
+                TermsEnum termsEnum = terms.Iterator(null);
+
+                bool skip = false;
+                // Seek to the first term that sorts at or after the prefix.
+                TermsEnum.SeekStatus status = termsEnum.SeekCeil(new BytesRef(Prefix));
+                if (status == TermsEnum.SeekStatus.FOUND)
+                {
+                    // Exact hit: the prefix itself is an indexed term.
+                    mtv.VisitMatchingTerm(GetLucenePrefixTerm(fieldName));
+                }
+                else if (status == TermsEnum.SeekStatus.NOT_FOUND)
+                {
+                    if (StringHelper.StartsWith(termsEnum.Term(), prefixRef))
+                    {
+                        mtv.VisitMatchingTerm(new Term(fieldName, termsEnum.Term().Utf8ToString()));
+                    }
+                    else
+                    {
+                        // First term >= prefix does not start with it, so no
+                        // term in this field can match.
+                        skip = true;
+                    }
+                }
+                else
+                {
+                    // EOF
+                    skip = true;
+                }
+
+                if (!skip)
+                {
+                    // Terms are sorted, so stop at the first term that no
+                    // longer carries the prefix.
+                    while (true)
+                    {
+                        BytesRef text = termsEnum.Next();
+                        if (text != null && StringHelper.StartsWith(text, prefixRef))
+                        {
+                            mtv.VisitMatchingTerm(new Term(fieldName, text.Utf8ToString()));
+                        }
+                        else
+                        {
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
new file mode 100644
index 0000000..57b19cc
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
@@ -0,0 +1,149 @@
+\ufeffusing Lucene.Net.Search;
+using Lucene.Net.Support;
+using System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Lowest level base class for surround queries.
+    /// </summary>
+    public abstract class SrndQuery : ICloneable
+    {
+        //public SrndQuery() { }
+
+        private float weight = (float)1.0;
+        private bool weighted = false;
+
+        // True once a weight has been assigned (i.e. parsed from the query text).
+        public virtual bool IsWeighted { get { return weighted; } }
+
+        public virtual float Weight 
+        { 
+            get { return weight; }
+            set
+            {
+                weight = value; /* as parsed from the query text */
+                weighted = true;
+            }
+        }
+
+        public virtual string WeightString { get { return Number.ToString(Weight); } }
+
+        public virtual string WeightOperator { get { return "^"; } }
+
+
+        protected virtual void WeightToString(StringBuilder r)
+        { 
+            /* append the weight part of a query */
+            if (IsWeighted)
+            {
+                r.Append(WeightOperator);
+                r.Append(WeightString);
+            }
+        }
+
+        /// <summary>
+        /// Builds the Lucene query for <paramref name="fieldName"/>, applying
+        /// this query's weight as a boost when one was parsed.
+        /// </summary>
+        public virtual Search.Query MakeLuceneQueryField(string fieldName, BasicQueryFactory qf)
+        {
+            Search.Query q = MakeLuceneQueryFieldNoBoost(fieldName, qf);
+            if (IsWeighted)
+            {
+                q.Boost=(Weight * q.Boost); /* weight may be at any level in a SrndQuery */
+            }
+            return q;
+        }
+
+        public abstract Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf);
+
+        /// <summary>
+        /// This method is used by <see cref="M:GetHashCode()"/> and <see cref="M:Equals(Object)"/>,
+        /// see LUCENE-2945.
+        /// </summary>
+        /// <returns>the query text representation, also used for equality and hashing</returns>
+        public abstract override string ToString();
+
+        public virtual bool IsFieldsSubQueryAcceptable { get { return true; } }
+
+        /// <summary> Shallow clone. Subclasses must override this if they
+        /// need to clone any members deeply.
+        /// </summary>
+        public virtual object Clone()
+        {
+            object clone = null;
+            try
+            {
+                clone = base.MemberwiseClone();
+            }
+            catch (Exception e)
+            {
+                throw new SystemException(e.Message, e); // shouldn't happen
+            }
+            return clone;
+        }
+
+        /// <summary>
+        /// For subclasses of <see cref="SrndQuery"/> within the
+        /// Lucene.Net.QueryParser.Surround.Query namespace
+        /// it is not necessary to override this method; it is
+        /// based on <see cref="M:ToString()"/>.
+        /// </summary>
+        public override int GetHashCode()
+        {
+            return GetType().GetHashCode() ^ ToString().GetHashCode();
+        }
+
+        /// <summary>
+        /// For subclasses of <see cref="SrndQuery"/> within the
+        /// Lucene.Net.QueryParser.Surround.Query namespace
+        /// it is not necessary to override this method; it is
+        /// based on <see cref="M:ToString()"/>.
+        /// </summary>
+        /// <param name="obj">the object to compare with</param>
+        /// <returns>true when <paramref name="obj"/> has the same concrete type and text representation</returns>
+        public override bool Equals(object obj)
+        {
+            if (obj == null)
+                return false;
+            if (!GetType().Equals(obj.GetType()))
+                return false;
+            return ToString().Equals(obj.ToString());
+        }
+
+        /// <summary> An empty Lucene query  </summary>
+        public readonly static Search.Query TheEmptyLcnQuery = new EmptyLcnQuery(); /* no changes allowed */ 
+  
+        // Immutable BooleanQuery: rejects any attempt to set a boost or add clauses,
+        // so the shared TheEmptyLcnQuery instance cannot be modified by callers.
+        internal sealed class EmptyLcnQuery : BooleanQuery
+        {
+            public override float Boost
+            {
+                get { return base.Boost; }
+                set { throw new NotSupportedException(); }
+            }
+
+            public override void Add(BooleanClause clause)
+            {
+                throw new NotSupportedException();
+            }
+
+            public override void Add(Search.Query query, BooleanClause.Occur occur)
+            {
+                throw new NotSupportedException();
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
new file mode 100644
index 0000000..45885a1
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
@@ -0,0 +1,63 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Util;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Simple single-term clause
+    /// </summary>
+    public class SrndTermQuery : SimpleTerm
+    {
+        private readonly string termText;
+
+        public SrndTermQuery(string termText, bool quoted)
+            : base(quoted)
+        {
+            this.termText = termText;
+        }
+
+        public virtual string TermText { get { return termText; } }
+
+        public virtual Term GetLuceneTerm(string fieldName)
+        {
+            return new Term(fieldName, TermText);
+        }
+
+        public override string ToStringUnquoted()
+        {
+            return TermText;
+        }
+
+        /// <summary>
+        /// Visits the term only when it is actually present in the index,
+        /// for symmetry with the other <see cref="SimpleTerm"/> implementations.
+        /// </summary>
+        public override void VisitMatchingTerms(IndexReader reader, string fieldName, IMatchingTermVisitor mtv)
+        {
+            Terms terms = MultiFields.GetTerms(reader, fieldName);
+            if (terms == null)
+            {
+                return;
+            }
+            TermsEnum termsEnum = terms.Iterator(null);
+            if (termsEnum.SeekCeil(new BytesRef(TermText)) == TermsEnum.SeekStatus.FOUND)
+            {
+                mtv.VisitMatchingTerm(GetLuceneTerm(fieldName));
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
new file mode 100644
index 0000000..5ed9ff3
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
@@ -0,0 +1,139 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Util;
+using System;
+using System.Text;
+using System.Text.RegularExpressions;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public class SrndTruncQuery : SimpleTerm
+    {
+        public SrndTruncQuery(string truncated, char unlimited, char mask)
+            : base(false) /* not quoted */
+        {
+            this.truncated = truncated;
+            this.unlimited = unlimited;
+            this.mask = mask;
+            TruncatedToPrefixAndPattern();
+        }
+
+        private readonly string truncated;
+        private readonly char unlimited;
+        private readonly char mask;
+
+        private string prefix;
+        private BytesRef prefixRef;
+        private Regex pattern;
+
+        public virtual string Truncated { get { return truncated; } }
+
+        public override string ToStringUnquoted()
+        {
+            return Truncated;
+        }
+
+        protected virtual bool MatchingChar(char c)
+        {
+            return (c != unlimited) && (c != mask);
+        }
+
+        protected virtual void AppendRegExpForChar(char c, StringBuilder re)
+        {
+            if (c == unlimited)
+                re.Append(".*");
+            else if (c == mask)
+                re.Append(".");
+            else
+                re.Append(c);
+        }
+
+        protected virtual void TruncatedToPrefixAndPattern()
+        {
+            int i = 0;
+            while ((i < truncated.Length) && MatchingChar(truncated[i]))
+            {
+                i++;
+            }
+            prefix = truncated.Substring(0, i);
+            prefixRef = new BytesRef(prefix);
+
+            StringBuilder re = new StringBuilder();
+            while (i < truncated.Length)
+            {
+                AppendRegExpForChar(truncated[i], re);
+                i++;
+            }
+            pattern = new Regex(re.ToString(), RegexOptions.Compiled);
+        }
+
+        // TODO: Finish implementation
+        public override void VisitMatchingTerms(IndexReader reader, string fieldName, SimpleTerm.IMatchingTermVisitor mtv)
+        {
+            throw new NotImplementedException("Need to translate this from Java's whacky RegEx syntax");
+            //int prefixLength = prefix.Length;
+            //Terms terms = MultiFields.GetTerms(reader, fieldName);
+            //if (terms != null)
+            //{
+            //    MatchCollection matcher = pattern.Matches("");
+            //    try
+            //    {
+            //        TermsEnum termsEnum = terms.Iterator(null);
+
+            //        TermsEnum.SeekStatus status = termsEnum.SeekCeil(prefixRef);
+            //        BytesRef text;
+            //        if (status == TermsEnum.SeekStatus.FOUND)
+            //        {
+            //            text = prefixRef;
+            //        }
+            //        else if (status == TermsEnum.SeekStatus.NOT_FOUND)
+            //        {
+            //            text = termsEnum.Term();
+            //        }
+            //        else
+            //        {
+            //            text = null;
+            //        }
+
+            //        while (text != null)
+            //        {
+            //            if (text != null && StringHelper.StartsWith(text, prefixRef))
+            //            {
+            //                string textString = text.Utf8ToString();
+            //                matcher.Reset(textString.Substring(prefixLength));
+            //                if (matcher.Success)
+            //                {
+            //                    mtv.VisitMatchingTerm(new Term(fieldName, textString));
+            //                }
+            //            }
+            //            else
+            //            {
+            //                break;
+            //            }
+            //            text = termsEnum.Next();
+            //        }
+            //    }
+            //    finally
+            //    {
+            //        matcher.Reset();
+            //    }
+            //}
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs b/src/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
new file mode 100644
index 0000000..27f313c
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
@@ -0,0 +1,30 @@
+\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Exception thrown when <see cref="BasicQueryFactory"/> would exceed the limit
+    /// of query clauses.
+    /// </summary>
+    public class TooManyBasicQueries : System.IO.IOException
+    {
+        public TooManyBasicQueries(int maxBasicQueries)
+            : base("Exceeded maximum of " + maxBasicQueries + " basic queries.")
+        { }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
new file mode 100644
index 0000000..10756cf
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
@@ -0,0 +1,341 @@
+\ufeffusing System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParser.Analyzing
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestAnalyzingQueryParser : LuceneTestCase
+    {
+        private readonly static string FIELD = "field";
+
+        private Analyzer a;
+
+        private string[] wildcardInput;
+        private string[] wildcardExpected;
+        private string[] prefixInput;
+        private string[] prefixExpected;
+        private string[] rangeInput;
+        private string[] rangeExpected;
+        private string[] fuzzyInput;
+        private string[] fuzzyExpected;
+
+        private IDictionary<string, string> wildcardEscapeHits = new Dictionary<string, string>();
+        private IDictionary<string, string> wildcardEscapeMisses = new Dictionary<string, string>();
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            wildcardInput = new string[] { "*bersetzung über*ung",
+                "Mötley Cr\u00fce Mötl?* Crü?", "Renée Zellweger Ren?? Zellw?ger" };
+            wildcardExpected = new string[] { "*bersetzung uber*ung", "motley crue motl?* cru?",
+                "renee zellweger ren?? zellw?ger" };
+
+            prefixInput = new string[] { "übersetzung übersetz*",
+                "Mötley Crüe Mötl* crü*", "René? Zellw*" };
+            prefixExpected = new string[] { "ubersetzung ubersetz*", "motley crue motl* cru*",
+                "rene? zellw*" };
+
+            rangeInput = new string[] { "[aa TO bb]", "{Anaïs TO Zoé}" };
+            rangeExpected = new string[] { "[aa TO bb]", "{anais TO zoe}" };
+
+            fuzzyInput = new string[] { "Übersetzung Übersetzung~0.9",
+                "Mötley Crüe Mötley~0.75 Crüe~0.5",
+                "Renée Zellweger Renée~0.9 Zellweger~" };
+            fuzzyExpected = new string[] { "ubersetzung ubersetzung~1",
+                "motley crue motley~1 crue~2", "renee zellweger renee~0 zellweger~2" };
+
+            wildcardEscapeHits["mö*tley"] = "moatley";
+
+            // need to have at least one genuine wildcard to trigger the wildcard analysis
+            // hence the * before the y
+            wildcardEscapeHits["mö\\*tl*y"] = "mo*tley";
+
+            // escaped backslash then true wildcard
+            wildcardEscapeHits["mö\\\\*tley"] = "mo\\atley";
+
+            // escaped wildcard then true wildcard
+            wildcardEscapeHits["mö\\??ley"] = "mo?tley";
+
+            // the first is an escaped * which should yield a miss
+            wildcardEscapeMisses["mö\\*tl*y"] = "moatley";
+
+            a = new ASCIIAnalyzer();
+        }
+
+        [Test]
+        public void TestSingleChunkExceptions()
+        {
+            bool ex = false;
+            string termStr = "the*tre";
+
+            Analyzer stopsAnalyzer = new MockAnalyzer
+                (Random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET);
+            try
+            {
+                string q = ParseWithAnalyzingQueryParser(termStr, stopsAnalyzer, true);
+            }
+            catch (ParseException e)
+            {
+                if (e.Message.Contains("returned nothing"))
+                {
+                    ex = true;
+                }
+            }
+            assertEquals("Should have returned nothing", true, ex);
+            ex = false;
+
+            AnalyzingQueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, a);
+            try
+            {
+                qp.AnalyzeSingleChunk(FIELD, "", "not a single chunk");
+            }
+            catch (ParseException e)
+            {
+                if (e.Message.Contains("multiple terms"))
+                {
+                    ex = true;
+                }
+            }
+            assertEquals("Should have produced multiple terms", true, ex);
+        }
+
+        [Test]
+        public void TestWildcardAlone()
+        {
+            //seems like crazy edge case, but can be useful in concordance 
+            bool pex = false;
+            try
+            {
+                Query q = GetAnalyzedQuery("*", a, false);
+            }
+            catch (ParseException e)
+            {
+                pex = true;
+            }
+            assertEquals("Wildcard alone with allowWildcard=false", true, pex);
+
+            pex = false;
+            try
+            {
+                String qString = ParseWithAnalyzingQueryParser("*", a, true);
+                assertEquals("Every word", "*", qString);
+            }
+            catch (ParseException e)
+            {
+                pex = true;
+            }
+
+            assertEquals("Wildcard alone with allowWildcard=true", false, pex);
+        }
+
+        [Test]
+        public void TestWildCardEscapes()
+        {
+            foreach (var entry in wildcardEscapeHits)
+            {
+                Query q = GetAnalyzedQuery(entry.Key, a, false);
+                assertEquals("WildcardEscapeHits: " + entry.Key, true, IsAHit(q, entry.Value, a));
+            }
+            foreach (var entry in wildcardEscapeMisses)
+            {
+                Query q = GetAnalyzedQuery(entry.Key, a, false);
+                assertEquals("WildcardEscapeMisses: " + entry.Key, false, IsAHit(q, entry.Value, a));
+            }
+        }
+
+        [Test]
+        public void TestWildCardQueryNoLeadingAllowed()
+        {
+            bool ex = false;
+            try
+            {
+                string q = ParseWithAnalyzingQueryParser(wildcardInput[0], a, false);
+
+            }
+            catch (ParseException e)
+            {
+                ex = true;
+            }
+            assertEquals("Testing initial wildcard not allowed",
+                true, ex);
+        }
+
+        [Test]
+        public void TestWildCardQuery()
+        {
+            for (int i = 0; i < wildcardInput.Length; i++)
+            {
+                assertEquals("Testing wildcards with analyzer " + a.GetType() + ", input string: "
+                    + wildcardInput[i], wildcardExpected[i], ParseWithAnalyzingQueryParser(wildcardInput[i], a, true));
+            }
+        }
+
+        [Test]
+        public void TestPrefixQuery()
+        {
+            for (int i = 0; i < prefixInput.Length; i++)
+            {
+                assertEquals("Testing prefixes with analyzer " + a.GetType() + ", input string: "
+                    + prefixInput[i], prefixExpected[i], ParseWithAnalyzingQueryParser(prefixInput[i], a, false));
+            }
+        }
+
+        [Test]
+        public void TestRangeQuery()
+        {
+            for (int i = 0; i < rangeInput.Length; i++)
+            {
+                assertEquals("Testing ranges with analyzer " + a.GetType() + ", input string: "
+                    + rangeInput[i], rangeExpected[i], ParseWithAnalyzingQueryParser(rangeInput[i], a, false));
+            }
+        }
+
+        [Test]
+        public void TestFuzzyQuery()
+        {
+            for (int i = 0; i < fuzzyInput.Length; i++)
+            {
+                assertEquals("Testing fuzzys with analyzer " + a.GetType() + ", input string: "
+                  + fuzzyInput[i], fuzzyExpected[i], ParseWithAnalyzingQueryParser(fuzzyInput[i], a, false));
+            }
+        }
+
+
+        private string ParseWithAnalyzingQueryParser(string s, Analyzer a, bool allowLeadingWildcard)
+        {
+            Query q = GetAnalyzedQuery(s, a, allowLeadingWildcard);
+            return q.ToString(FIELD);
+        }
+
+        private Query GetAnalyzedQuery(string s, Analyzer a, bool allowLeadingWildcard)
+        {
+            AnalyzingQueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, a);
+            qp.AllowLeadingWildcard = allowLeadingWildcard;
+            Query q = qp.Parse(s);
+            return q;
+        }
+
+        internal sealed class FoldingFilter : TokenFilter
+        {
+            private readonly ICharTermAttribute termAtt;
+
+            public FoldingFilter(TokenStream input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                if (input.IncrementToken())
+                {
+                    char[] term = termAtt.Buffer();
+                    for (int i = 0; i < term.Length; i++)
+                        switch (term[i])
+                        {
+                            case 'ü':
+                                term[i] = 'u';
+                                break;
+                            case 'ö':
+                                term[i] = 'o';
+                                break;
+                            case 'é':
+                                term[i] = 'e';
+                                break;
+                            case 'ï':
+                                term[i] = 'i';
+                                break;
+                        }
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        }
+
+        internal sealed class ASCIIAnalyzer : Analyzer
+        {
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(result, new FoldingFilter(result));
+            }
+        }
+
+        // LUCENE-4176
+        [Test]
+        public void TestByteTerms()
+        {
+            string s = "\u0e40\u0e02";
+            Analyzer analyzer = new MockBytesAnalyzer();
+            Classic.QueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, analyzer);
+            Query q = qp.Parse("[\u0e40\u0e02 TO \u0e40\u0e02]");
+            assertEquals(true, IsAHit(q, s, analyzer));
+        }
+
+        private bool IsAHit(Query q, string content, Analyzer analyzer)
+        {
+            int hits;
+            using (Directory ramDir = NewDirectory())
+            {
+                using (RandomIndexWriter writer = new RandomIndexWriter(Random(), ramDir, analyzer))
+                {
+                    Document doc = new Document();
+                    FieldType fieldType = new FieldType();
+                    fieldType.Indexed = (true);
+                    fieldType.Tokenized = (true);
+                    fieldType.Stored = (true);
+                    Field field = new Field(FIELD, content, fieldType);
+                    doc.Add(field);
+                    writer.AddDocument(doc);
+                }
+                using (DirectoryReader ir = DirectoryReader.Open(ramDir))
+                {
+                    IndexSearcher @is = new IndexSearcher(ir);
+
+                    hits = @is.Search(q, 10).TotalHits;
+                }
+            }
+            if (hits == 1)
+            {
+                return true;
+            }
+            else
+            {
+                return false;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
new file mode 100644
index 0000000..350f181
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
@@ -0,0 +1,278 @@
+\ufeffusing System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using NUnit.Framework;
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    [TestFixture]
+    public class TestMultiAnalyzer_ : BaseTokenStreamTestCase
+    {
+
+        private static int multiToken = 0;
+
+        [Test]
+        public void TestMultiAnalyzer()
+        {
+
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "", new MultiAnalyzer());
+
+            // trivial, no multiple tokens:
+            assertEquals("foo", qp.Parse("foo").toString());
+            assertEquals("foo", qp.Parse("\"foo\"").toString());
+            assertEquals("foo foobar", qp.Parse("foo foobar").toString());
+            assertEquals("\"foo foobar\"", qp.Parse("\"foo foobar\"").toString());
+            assertEquals("\"foo foobar blah\"", qp.Parse("\"foo foobar blah\"").toString());
+
+            // two tokens at the same position:
+            assertEquals("(multi multi2) foo", qp.Parse("multi foo").toString());
+            assertEquals("foo (multi multi2)", qp.Parse("foo multi").toString());
+            assertEquals("(multi multi2) (multi multi2)", qp.Parse("multi multi").toString());
+            assertEquals("+(foo (multi multi2)) +(bar (multi multi2))",
+                qp.Parse("+(foo multi) +(bar multi)").toString());
+            assertEquals("+(foo (multi multi2)) field:\"bar (multi multi2)\"",
+                qp.Parse("+(foo multi) field:\"bar multi\"").toString());
+
+            // phrases:
+            assertEquals("\"(multi multi2) foo\"", qp.Parse("\"multi foo\"").toString());
+            assertEquals("\"foo (multi multi2)\"", qp.Parse("\"foo multi\"").toString());
+            assertEquals("\"foo (multi multi2) foobar (multi multi2)\"",
+                qp.Parse("\"foo multi foobar multi\"").toString());
+
+            // fields:
+            assertEquals("(field:multi field:multi2) field:foo", qp.Parse("field:multi field:foo").toString());
+            assertEquals("field:\"(multi multi2) foo\"", qp.Parse("field:\"multi foo\"").toString());
+
+            // three tokens at one position:
+            assertEquals("triplemulti multi3 multi2", qp.Parse("triplemulti").toString());
+            assertEquals("foo (triplemulti multi3 multi2) foobar",
+                qp.Parse("foo triplemulti foobar").toString());
+
+            // phrase with non-default slop:
+            assertEquals("\"(multi multi2) foo\"~10", qp.Parse("\"multi foo\"~10").toString());
+
+            // phrase with non-default boost:
+            assertEquals("\"(multi multi2) foo\"^2.0", qp.Parse("\"multi foo\"^2").toString());
+
+            // phrase after changing default slop
+            qp.PhraseSlop=(99);
+            assertEquals("\"(multi multi2) foo\"~99 bar",
+                         qp.Parse("\"multi foo\" bar").toString());
+            assertEquals("\"(multi multi2) foo\"~99 \"foo bar\"~2",
+                         qp.Parse("\"multi foo\" \"foo bar\"~2").toString());
+            qp.PhraseSlop=(0);
+
+            // non-default operator:
+            qp.DefaultOperator=(QueryParserBase.AND_OPERATOR);
+            assertEquals("+(multi multi2) +foo", qp.Parse("multi foo").toString());
+
+        }
+
+        [Test]
+        public void TestMultiAnalyzerWithSubclassOfQueryParser()
+        {
+
+            DumbQueryParser qp = new DumbQueryParser("", new MultiAnalyzer());
+            qp.PhraseSlop = (99); // modified default slop
+
+            // direct call to (super's) getFieldQuery to demonstrate difference
+            // between phrase and multiphrase with modified default slop
+            assertEquals("\"foo bar\"~99",
+                         qp.GetSuperFieldQuery("", "foo bar", true).toString());
+            assertEquals("\"(multi multi2) bar\"~99",
+                         qp.GetSuperFieldQuery("", "multi bar", true).toString());
+
+
+            // ask subclass to parse phrase with modified default slop
+            assertEquals("\"(multi multi2) foo\"~99 bar",
+                         qp.Parse("\"multi foo\" bar").toString());
+
+        }
+
+        [Test]
+        public void TestPosIncrementAnalyzer()
+        {
+            QueryParser qp = new QueryParser(LuceneVersion.LUCENE_40, "", new PosIncrementAnalyzer());
+            assertEquals("quick brown", qp.Parse("the quick brown").toString());
+            assertEquals("quick brown fox", qp.Parse("the quick brown fox").toString());
+        }
+
+        /// <summary>
+        /// Expands "multi" to "multi" and "multi2", both at the same position,
+        /// and expands "triplemulti" to "triplemulti", "multi3", and "multi2".  
+        /// </summary>
+        private class MultiAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(result, new TestFilter(result));
+            }
+        }
+
+        private sealed class TestFilter : TokenFilter
+        {
+
+            private string prevType;
+            private int prevStartOffset;
+            private int prevEndOffset;
+
+            private readonly ICharTermAttribute termAtt;
+            private readonly IPositionIncrementAttribute posIncrAtt;
+            private readonly IOffsetAttribute offsetAtt;
+            private readonly ITypeAttribute typeAtt;
+
+            public TestFilter(TokenStream @in)
+                : base(@in)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+                offsetAtt = AddAttribute<IOffsetAttribute>();
+                typeAtt = AddAttribute<ITypeAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                if (multiToken > 0)
+                {
+                    termAtt.SetEmpty().Append("multi" + (multiToken + 1));
+                    offsetAtt.SetOffset(prevStartOffset, prevEndOffset);
+                    typeAtt.Type = (prevType);
+                    posIncrAtt.PositionIncrement = (0);
+                    multiToken--;
+                    return true;
+                }
+                else
+                {
+                    bool next = input.IncrementToken();
+                    if (!next)
+                    {
+                        return false;
+                    }
+                    prevType = typeAtt.Type;
+                    prevStartOffset = offsetAtt.StartOffset();
+                    prevEndOffset = offsetAtt.EndOffset();
+                    string text = termAtt.toString();
+                    if (text.equals("triplemulti"))
+                    {
+                        multiToken = 2;
+                        return true;
+                    }
+                    else if (text.equals("multi"))
+                    {
+                        multiToken = 1;
+                        return true;
+                    }
+                    else
+                    {
+                        return true;
+                    }
+                }
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.prevType = null;
+                this.prevStartOffset = 0;
+                this.prevEndOffset = 0;
+            }
+        }
+
+        /// <summary>
+        /// Analyzes "the quick brown" as: quick(incr=2) brown(incr=1).
+        /// Does not work correctly for input other than "the quick brown ...".
+        /// </summary>
+        private class PosIncrementAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(result, new TestPosIncrementFilter(result));
+            }
+        }
+
+        private sealed class TestPosIncrementFilter : TokenFilter
+        {
+            ICharTermAttribute termAtt;
+            IPositionIncrementAttribute posIncrAtt;
+
+            public TestPosIncrementFilter(TokenStream @in)
+                : base(@in)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                while (input.IncrementToken())
+                {
+                    if (termAtt.toString().equals("the"))
+                    {
+                        // stopword, do nothing
+                    }
+                    else if (termAtt.toString().equals("quick"))
+                    {
+                        posIncrAtt.PositionIncrement = (2);
+                        return true;
+                    }
+                    else
+                    {
+                        posIncrAtt.PositionIncrement = (1);
+                        return true;
+                    }
+                }
+                return false;
+            }
+        }
+
+        /// <summary>
+        /// a very simple subclass of QueryParser
+        /// </summary>
+        private sealed class DumbQueryParser : QueryParser
+        {
+            public DumbQueryParser(string f, Analyzer a)
+                : base(TEST_VERSION_CURRENT, f, a)
+            {
+            }
+
+            // expose super's version 
+            public Query GetSuperFieldQuery(string f, string t, bool quoted)
+            {
+                return base.GetFieldQuery(f, t, quoted);
+            }
+
+            // wrap super's version
+            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
+            {
+                return new DumbQueryWrapper(GetSuperFieldQuery(field, queryText, quoted));
+            }
+        }
+
+        /// <summary>
+        /// A very simple wrapper to prevent instanceof checks but uses
+        /// the toString of the query it wraps.
+        /// </summary>
+        private sealed class DumbQueryWrapper : Query
+        {
+            private Query q;
+            public DumbQueryWrapper(Query q)
+            {
+                this.q = q;
+            }
+
+            public override string ToString(string field)
+            {
+                return q.ToString(field);
+            }
+        }
+
+    }
+}


[24/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Ext/ParserExtension.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Ext/ParserExtension.cs b/src/Lucene.Net.QueryParser/Ext/ParserExtension.cs
new file mode 100644
index 0000000..27b9212
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Ext/ParserExtension.cs
@@ -0,0 +1,50 @@
+\ufeffusing Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
    /// <summary>
    /// This class represents an extension base class to the Lucene standard
    /// <see cref="Classic.QueryParser"/>. The
    /// <see cref="Classic.QueryParser"/> is generated by the JavaCC
    /// parser generator. Changing or adding functionality or syntax in the standard
    /// query parser requires changes to the JavaCC source file. To enable extending
    /// the standard query parser without changing the JavaCC sources and re-generating
    /// the parser, the <see cref="ParserExtension"/> can be customized and plugged into an
    /// instance of <see cref="ExtendableQueryParser"/>, a direct subclass of
    /// <see cref="Classic.QueryParser"/>.
    /// </summary>
    /// <seealso cref="Extensions"/>
    /// <seealso cref="ExtendableQueryParser"/>
    public abstract class ParserExtension
    {
        /// <summary>
        /// Processes the given <see cref="ExtensionQuery"/> and returns a corresponding
        /// <see cref="Query"/> instance. Subclasses must either return a <see cref="Query"/>
        /// instance or raise a <see cref="ParseException"/>. This method must not return
        /// <c>null</c>.
        /// </summary>
        /// <param name="query">the extension query</param>
        /// <returns>a new query instance</returns>
        /// <exception cref="ParseException">if the query can not be parsed.</exception>
        public abstract Query Parse(ExtensionQuery query);
    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Flexible/Standard/CommonQueryParserConfiguration.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/CommonQueryParserConfiguration.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/CommonQueryParserConfiguration.cs
new file mode 100644
index 0000000..ae3809f
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/CommonQueryParserConfiguration.cs
@@ -0,0 +1,106 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+using System;
+using System.Globalization;
+
+namespace Lucene.Net.QueryParser.Flexible.Standard
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
    /// <summary>
    /// Configuration options common across queryparser implementations.
    /// </summary>
    public interface ICommonQueryParserConfiguration
    {
        /// <summary>
        /// Whether terms of multi-term queries (e.g., wildcard,
        /// prefix, fuzzy and range) should be automatically
        /// lower-cased or not.  Default is <c>true</c>.
        /// </summary>
        bool LowercaseExpandedTerms { get; set; }

        /// <summary>
        /// Set to <c>true</c> to allow leading wildcard characters.
        /// <para/>
        /// When set, <c>*</c> or <c>?</c> are allowed as the first
        /// character of a PrefixQuery and WildcardQuery. Note that this can produce
        /// very slow queries on big indexes.
        /// <para/>
        /// Default: false.
        /// </summary>
        bool AllowLeadingWildcard { get; set; }

        /// <summary>
        /// Set to <c>true</c> to enable position increments in result query.
        /// <para/>
        /// When set, result phrase and multi-phrase queries will be aware of position
        /// increments. Useful when e.g. a StopFilter increases the position increment
        /// of the token that follows an omitted token.
        /// <para/>
        /// Default: false.
        /// </summary>
        bool EnablePositionIncrements { get; set; }

        /// <summary>
        /// By default, it uses
        /// <see cref="MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT"/> when creating a
        /// prefix, wildcard and range queries. This implementation is generally
        /// preferable because it a) Runs faster b) Does not have the scarcity of terms
        /// unduly influence score c) avoids any <see cref="BooleanQuery.TooManyClauses"/>
        /// exception. However, if your application really needs to use the
        /// old-fashioned boolean queries expansion rewriting and the above points are
        /// not relevant then use this property to change the rewrite method.
        /// </summary>
        MultiTermQuery.RewriteMethod MultiTermRewriteMethod { get; set; }

        /// <summary>
        /// Get or Set the prefix length for fuzzy queries. Default is 0.
        /// </summary>
        int FuzzyPrefixLength { get; set; }

        /// <summary>
        /// Get or Set locale used by date range parsing.
        /// </summary>
        CultureInfo Locale { get; set; }

        /// <summary>
        /// Gets or Sets the time zone used by date range parsing.
        /// </summary>
        TimeZoneInfo TimeZone { get; set; }

        /// <summary>
        /// Gets or Sets the default slop for phrases. If zero, then exact phrase matches are
        /// required. Default value is zero.
        /// </summary>
        int PhraseSlop { get; set; }

        // The analyzer associated with this configuration (read-only).
        Analyzer Analyzer { get; }

        /// <summary>
        /// Gets or Sets the minimal similarity for fuzzy queries.
        /// </summary>
        float FuzzyMinSim { get; set; }

        /// <summary>
        /// Sets the default <see cref="T:DateTools.Resolution"/> used for certain field when
        /// no <see cref="T:DateTools.Resolution"/> is defined for this field.
        /// </summary>
        /// <param name="dateResolution">the default date resolution to apply</param>
        void SetDateResolution(DateTools.Resolution dateResolution);
    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/src/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
new file mode 100644
index 0000000..e3f8fc6
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
@@ -0,0 +1,107 @@
+\ufeff<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Lucene.Net.QueryParser</RootNamespace>
+    <AssemblyName>Lucene.Net.QueryParser</AssemblyName>
+    <TargetFrameworkVersion>v4.5.1</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>bin\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>pdbonly</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>bin\Release\</OutputPath>
+    <DefineConstants>TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+    <Reference Include="System.Xml.Linq" />
+    <Reference Include="System.Data.DataSetExtensions" />
+    <Reference Include="Microsoft.CSharp" />
+    <Reference Include="System.Data" />
+    <Reference Include="System.Xml" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="Analyzing\AnalyzingQueryParser.cs" />
+    <Compile Include="Classic\CharStream.cs" />
+    <Compile Include="Classic\FastCharStream.cs" />
+    <Compile Include="Classic\MultiFieldQueryParser.cs" />
+    <Compile Include="Classic\ParseException.cs" />
+    <Compile Include="Classic\QueryParser.cs" />
+    <Compile Include="Classic\QueryParserBase.cs" />
+    <Compile Include="Classic\QueryParserConstants.cs" />
+    <Compile Include="Classic\QueryParserTokenManager.cs" />
+    <Compile Include="Classic\Token.cs" />
+    <Compile Include="Classic\TokenMgrError.cs" />
+    <Compile Include="ComplexPhrase\ComplexPhraseQueryParser.cs" />
+    <Compile Include="Ext\ExtendableQueryParser.cs" />
+    <Compile Include="Ext\ExtensionQuery.cs" />
+    <Compile Include="Ext\Extensions.cs" />
+    <Compile Include="Ext\ParserExtension.cs" />
+    <Compile Include="Flexible\Standard\CommonQueryParserConfiguration.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="Simple\SimpleQueryParser.cs" />
+    <Compile Include="Surround\Parser\CharStream.cs" />
+    <Compile Include="Surround\Parser\FastCharStream.cs" />
+    <Compile Include="Surround\Parser\ParseException.cs" />
+    <Compile Include="Surround\Parser\QueryParser.cs" />
+    <Compile Include="Surround\Parser\QueryParserConstants.cs" />
+    <Compile Include="Surround\Parser\QueryParserTokenManager.cs" />
+    <Compile Include="Surround\Parser\Token.cs" />
+    <Compile Include="Surround\Parser\TokenMgrError.cs" />
+    <Compile Include="Surround\Query\AndQuery.cs" />
+    <Compile Include="Surround\Query\BasicQueryFactory.cs" />
+    <Compile Include="Surround\Query\ComposedQuery.cs" />
+    <Compile Include="Surround\Query\DistanceQuery.cs" />
+    <Compile Include="Surround\Query\DistanceRewriteQuery.cs" />
+    <Compile Include="Surround\Query\DistanceSubQuery.cs" />
+    <Compile Include="Surround\Query\FieldsQuery.cs" />
+    <Compile Include="Surround\Query\NotQuery.cs" />
+    <Compile Include="Surround\Query\OrQuery.cs" />
+    <Compile Include="Surround\Query\RewriteQuery.cs" />
+    <Compile Include="Surround\Query\SimpleTerm.cs" />
+    <Compile Include="Surround\Query\SimpleTermRewriteQuery.cs" />
+    <Compile Include="Surround\Query\SpanNearClauseFactory.cs" />
+    <Compile Include="Surround\Query\SrndBooleanQuery.cs" />
+    <Compile Include="Surround\Query\SrndPrefixQuery.cs" />
+    <Compile Include="Surround\Query\SrndQuery.cs" />
+    <Compile Include="Surround\Query\SrndTermQuery.cs" />
+    <Compile Include="Surround\Query\SrndTruncQuery.cs" />
+    <Compile Include="Surround\Query\TooManyBasicQueries.cs" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\Lucene.Net.Analysis.Common\Lucene.Net.Analysis.Common.csproj">
+      <Project>{4add0bbc-b900-4715-9526-d871de8eea64}</Project>
+      <Name>Lucene.Net.Analysis.Common</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\Lucene.Net.Core\Lucene.Net.csproj">
+      <Project>{5d4ad9be-1ffb-41ab-9943-25737971bf57}</Project>
+      <Name>Lucene.Net</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs b/src/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..023bf34
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
@@ -0,0 +1,39 @@
+\ufeffusing System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following 
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("Lucene.Net.QueryParser")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("Lucene.Net.QueryParser")]
+[assembly: AssemblyCopyright("Copyright ©  2016")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible 
+// to COM components.  If you need to access a type in this assembly from 
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("7c58cf05-89dd-4c02-a948-c28cdaf05247")]
+
+// for testing
+[assembly: InternalsVisibleTo("Lucene.Net.Tests.QueryParser")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version 
+//      Build Number
+//      Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers 
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
new file mode 100644
index 0000000..1029c8b
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
@@ -0,0 +1,788 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using Lucene.Net.Util.Automaton;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.Simple
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// SimpleQueryParser is used to parse human readable query syntax.
+    /// <p>
+    /// The main idea behind this parser is that a person should be able to type
+    /// whatever they want to represent a query, and this parser will do its best
+    /// to interpret what to search for no matter how poorly composed the request
+    /// may be. Tokens are considered to be any of a term, phrase, or subquery for the
+    /// operations described below.  Whitespace including ' ' '\n' '\r' and '\t'
+    /// and certain operators may be used to delimit tokens ( ) + | " .
+    /// <p>
+    /// Any errors in query syntax will be ignored and the parser will attempt
+    /// to decipher what it can; however, this may mean odd or unexpected results.
+    /// <h4>Query Operators</h4>
+    /// <ul>
+    ///  <li>'{@code +}' specifies {@code AND} operation: <tt>token1+token2</tt>
+    ///  <li>'{@code |}' specifies {@code OR} operation: <tt>token1|token2</tt>
+    ///  <li>'{@code -}' negates a single token: <tt>-token0</tt>
+    ///  <li>'{@code "}' creates phrases of terms: <tt>"term1 term2 ..."</tt>
+    ///  <li>'{@code *}' at the end of terms specifies prefix query: <tt>term*</tt>
+    ///  <li>'{@code ~}N' at the end of terms specifies fuzzy query: <tt>term~1</tt>
+    ///  <li>'{@code ~}N' at the end of phrases specifies near query: <tt>"term1 term2"~5</tt>
+    ///  <li>'{@code (}' and '{@code )}' specifies precedence: <tt>token1 + (token2 | token3)</tt>
+    /// </ul>
+    /// <p>
+    /// The {@link #setDefaultOperator default operator} is {@code OR} if no other operator is specified.
+    /// For example, the following will {@code OR} {@code token1} and {@code token2} together:
+    /// <tt>token1 token2</tt>
+    /// <p>
+    /// Normal operator precedence will be simple order from right to left.
+    /// For example, the following will evaluate {@code token1 OR token2} first,
+    /// then {@code AND} with {@code token3}:
+    /// <blockquote>token1 | token2 + token3</blockquote>
+    /// <h4>Escaping</h4>
+    /// <p>
+    /// An individual term may contain any possible character with certain characters
+    /// requiring escaping using a '{@code \}'.  The following characters will need to be escaped in
+    /// terms and phrases:
+    /// {@code + | " ( ) ' \}
+    /// <p>
+    /// The '{@code -}' operator is a special case.  On individual terms (not phrases) the first
+    /// character of a term that is {@code -} must be escaped; however, any '{@code -}' characters
+    /// beyond the first character do not need to be escaped.
+    /// For example:
+    /// <ul>
+    ///   <li>{@code -term1}   -- Specifies {@code NOT} operation against {@code term1}
+    ///   <li>{@code \-term1}  -- Searches for the term {@code -term1}.
+    ///   <li>{@code term-1}   -- Searches for the term {@code term-1}.
+    ///   <li>{@code term\-1}  -- Searches for the term {@code term-1}.
+    /// </ul>
+    /// <p>
+    /// The '{@code *}' operator is a special case. On individual terms (not phrases) the last
+    /// character of a term that is '{@code *}' must be escaped; however, any '{@code *}' characters
+    /// before the last character do not need to be escaped:
+    /// <ul>
+    ///   <li>{@code term1*}  --  Searches for the prefix {@code term1}
+    ///   <li>{@code term1\*} --  Searches for the term {@code term1*}
+    ///   <li>{@code term*1}  --  Searches for the term {@code term*1}
+    ///   <li>{@code term\*1} --  Searches for the term {@code term*1}
+    /// </ul>
+    /// <p>
+    /// Note that above examples consider the terms before text processing.
+    /// </summary>
+    public class SimpleQueryParser : QueryBuilder
+    {
        /// <summary>Map of fields to query against with their weights.</summary>
        protected readonly IDictionary<string, float> weights;

        // TODO: Make these into a [Flags] enum in .NET??
        /// <summary>Flags to the parser (to turn features on/off).</summary>
        protected readonly int flags;

        /// <summary>Enables <c>AND</c> operator (+)</summary>
        public static readonly int AND_OPERATOR         = 1<<0;
        /// <summary>Enables <c>NOT</c> operator (-)</summary>
        public static readonly int NOT_OPERATOR         = 1<<1;
        /// <summary>Enables <c>OR</c> operator (|)</summary>
        public static readonly int OR_OPERATOR          = 1<<2;
        /// <summary>Enables <c>PREFIX</c> operator (*)</summary>
        public static readonly int PREFIX_OPERATOR      = 1<<3;
        /// <summary>Enables <c>PHRASE</c> operator (")</summary>
        public static readonly int PHRASE_OPERATOR      = 1<<4;
        /// <summary>Enables <c>PRECEDENCE</c> operators: <c>(</c> and <c>)</c></summary>
        public static readonly int PRECEDENCE_OPERATORS = 1<<5;
        /// <summary>Enables <c>ESCAPE</c> operator (\)</summary>
        public static readonly int ESCAPE_OPERATOR      = 1<<6;
        /// <summary>Enables <c>WHITESPACE</c> operators: ' ' '\n' '\r' '\t'</summary>
        public static readonly int WHITESPACE_OPERATOR  = 1<<7;
        /// <summary>Enables <c>FUZZY</c> operators: (~) on single terms</summary>
        public static readonly int FUZZY_OPERATOR       = 1<<8;
        /// <summary>Enables <c>NEAR</c> operators: (~) on phrases</summary>
        public static readonly int NEAR_OPERATOR        = 1<<9;

        // operator applied between tokens when none is written explicitly;
        // SHOULD corresponds to OR semantics (see class documentation above)
        private BooleanClause.Occur defaultOperator = BooleanClause.Occur.SHOULD;
+
        /// <summary>
        /// Creates a new parser searching over a single field.
        /// </summary>
        /// <param name="analyzer">the analyzer used to tokenize terms and phrases</param>
        /// <param name="field">the single field to search against (implicit weight 1.0)</param>
        public SimpleQueryParser(Analyzer analyzer, string field)
            : this(analyzer, new HashMap<string, float>() { { field, 1.0F } })
        {
        }

        /// <summary>
        /// Creates a new parser searching over multiple fields with different weights.
        /// </summary>
        /// <param name="analyzer">the analyzer used to tokenize terms and phrases</param>
        /// <param name="weights">fields to search against, each mapped to its boost weight</param>
        public SimpleQueryParser(Analyzer analyzer, IDictionary<string, float> weights)
            : this(analyzer, weights, -1)
        {
        }

        /// <summary>
        /// Creates a new parser with custom flags used to enable/disable certain features.
        /// </summary>
        /// <param name="analyzer">the analyzer used to tokenize terms and phrases</param>
        /// <param name="weights">fields to search against, each mapped to its boost weight</param>
        /// <param name="flags">bitwise OR of the *_OPERATOR constants; -1 (all bits set) enables every feature</param>
        public SimpleQueryParser(Analyzer analyzer, IDictionary<string, float> weights, int flags)
            : base(analyzer)
        {
            this.weights = weights;
            this.flags = flags;
        }
+
+        /// <summary>
+        /// Parses the query text and returns parsed query (or null if empty)
+        /// </summary>
+        /// <param name="queryText"></param>
+        /// <returns></returns>
+        public Query Parse(string queryText)
+        {
+            char[] data = queryText.ToCharArray();
+            char[] buffer = new char[data.Length];
+
+            State state = new State(data, buffer, 0, data.Length);
+            ParseSubQuery(state);
+            return state.Top;
+        }
+
        /// <summary>
        /// Scans the characters of <paramref name="state"/> from its current index,
        /// dispatching on operator characters (subquery, phrase, +, |, -, whitespace)
        /// when the corresponding feature flag is enabled, and consuming plain tokens
        /// otherwise. Builds the query tree into <c>state.Top</c> as it goes.
        /// </summary>
        private void ParseSubQuery(State state)
        {
            while (state.Index < state.Length)
            {
                if (state.Data[state.Index] == '(' && (flags & PRECEDENCE_OPERATORS) != 0)
                {
                    // the beginning of a subquery has been found
                    ConsumeSubQuery(state);
                }
                else if (state.Data[state.Index] == ')' && (flags & PRECEDENCE_OPERATORS) != 0)
                {
                    // this is an extraneous character so it is ignored
                    ++state.Index;
                }
                else if (state.Data[state.Index] == '"' && (flags & PHRASE_OPERATOR) != 0)
                {
                    // the beginning of a phrase has been found
                    ConsumePhrase(state);
                }
                else if (state.Data[state.Index] == '+' && (flags & AND_OPERATOR) != 0)
                {
                    // an and operation has been explicitly set
                    // if an operation has already been set this one is ignored
                    // if a term (or phrase or subquery) has not been found yet the
                    // operation is also ignored since there is no previous
                    // term (or phrase or subquery) to and with
                    if (!state.CurrentOperationIsSet && state.Top != null)
                    {
                        state.CurrentOperation = BooleanClause.Occur.MUST;
                    }

                    ++state.Index;
                }
                else if (state.Data[state.Index] == '|' && (flags & OR_OPERATOR) != 0)
                {
                    // an or operation has been explicitly set
                    // if an operation has already been set this one is ignored
                    // if a term (or phrase or subquery) has not been found yet the
                    // operation is also ignored since there is no previous
                    // term (or phrase or subquery) to or with
                    if (!state.CurrentOperationIsSet && state.Top != null)
                    {
                        state.CurrentOperation = BooleanClause.Occur.SHOULD;
                    }

                    ++state.Index;
                }
                else if (state.Data[state.Index] == '-' && (flags & NOT_OPERATOR) != 0)
                {
                    // a not operator has been found, so increase the not count
                    // two not operators in a row negate each other
                    ++state.Not;
                    ++state.Index;

                    // continue so the not operator is not reset
                    // before the next character is determined
                    continue;
                }
                else if ((state.Data[state.Index] == ' '
                  || state.Data[state.Index] == '\t'
                  || state.Data[state.Index] == '\n'
                  || state.Data[state.Index] == '\r') && (flags & WHITESPACE_OPERATOR) != 0)
                {
                    // ignore any whitespace found as it may have already been
                    // used a delimiter across a term (or phrase or subquery)
                    // or is simply extraneous
                    ++state.Index;
                }
                else
                {
                    // the beginning of a token has been found
                    ConsumeToken(state);
                }

                // reset the not operator as even whitespace is not allowed when
                // specifying the not operation for a term (or phrase or subquery)
                state.Not = 0;
            }
        }
+
        /// <summary>
        /// Consumes a parenthesized subquery starting at the current '(' character.
        /// Tracks nesting depth and escape characters to find the matching ')';
        /// an unmatched '(' is treated as extraneous and ignored, an empty "()"
        /// resets the pending operation, and a complete subquery is parsed
        /// recursively with a fresh <c>State</c> and merged into the query tree.
        /// </summary>
        private void ConsumeSubQuery(State state)
        {
            Debug.Assert((flags & PRECEDENCE_OPERATORS) != 0);
            int start = ++state.Index;
            int precedence = 1;
            bool escaped = false;

            while (state.Index < state.Length)
            {
                if (!escaped)
                {
                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
                    {
                        // an escape character has been found so
                        // whatever character is next will become
                        // part of the subquery unless the escape
                        // character is the last one in the data
                        escaped = true;
                        ++state.Index;

                        continue;
                    }
                    else if (state.Data[state.Index] == '(')
                    {
                        // increase the precedence as there is a
                        // subquery in the current subquery
                        ++precedence;
                    }
                    else if (state.Data[state.Index] == ')')
                    {
                        --precedence;

                        if (precedence == 0)
                        {
                            // this should be the end of the subquery
                            // all characters found will used for
                            // creating the subquery
                            break;
                        }
                    }
                }

                escaped = false;
                ++state.Index;
            }

            if (state.Index == state.Length)
            {
                // a closing parenthesis was never found so the opening
                // parenthesis is considered extraneous and will be ignored
                state.Index = start;
            }
            else if (state.Index == start)
            {
                // a closing parenthesis was found immediately after the opening
                // parenthesis so the current operation is reset since it would
                // have been applied to this subquery
                state.CurrentOperationIsSet = false;

                ++state.Index;
            }
            else
            {
                // a complete subquery has been found and is recursively parsed by
                // starting over with a new state object
                State subState = new State(state.Data, state.Buffer, start, state.Index);
                ParseSubQuery(subState);
                BuildQueryTree(state, subState.Top);

                ++state.Index;
            }
        }
+
+        private void ConsumePhrase(State state) // consumes a quoted phrase, honoring escapes and an optional trailing ~slop
+        {
+            Debug.Assert((flags & PHRASE_OPERATOR) != 0);
+            int start = ++state.Index;
+            int copied = 0;
+            bool escaped = false;
+            bool hasSlop = false;
+
+            while (state.Index < state.Length)
+            {
+                if (!escaped)
+                {
+                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
+                    {
+                        // an escape character has been found so
+                        // whatever character is next will become
+                        // part of the phrase unless the escape
+                        // character is the last one in the data
+                        escaped = true;
+                        ++state.Index;
+
+                        continue;
+                    }
+                    else if (state.Data[state.Index] == '"')
+                    {
+                        // if there are still characters after the closing ", check for a
+                        // tilde
+                        if (state.Length > (state.Index + 1) &&
+                            state.Data[state.Index + 1] == '~' &&
+                            (flags & NEAR_OPERATOR) != 0)
+                        {
+                            state.Index++;
+                            // check for characters after the tilde
+                            if (state.Length > (state.Index + 1))
+                            {
+                                hasSlop = true;
+                            }
+                            break;
+                        }
+                        else
+                        {
+                            // this should be the end of the phrase
+                            // all characters found will be used for
+                            // creating the phrase query
+                            break;
+                        }
+                    }
+                }
+
+                escaped = false;
+                state.Buffer[copied++] = state.Data[state.Index++];
+            }
+
+            if (state.Index == state.Length)
+            {
+                // a closing double quote was never found so the opening
+                // double quote is considered extraneous and will be ignored
+                state.Index = start;
+            }
+            else if (state.Index == start)
+            {
+                // a closing double quote was found immediately after the opening
+                // double quote so the current operation is reset since it would
+                // have been applied to this phrase
+                state.CurrentOperationIsSet = false;
+
+                ++state.Index;
+            }
+            else
+            {
+                // a complete phrase has been found and is parsed
+                // through the analyzer from the given field
+                string phrase = new string(state.Buffer, 0, copied);
+                Query branch;
+                if (hasSlop)
+                {
+                    branch = NewPhraseQuery(phrase, ParseFuzziness(state));
+                }
+                else
+                {
+                    branch = NewPhraseQuery(phrase, 0);
+                }
+                BuildQueryTree(state, branch);
+
+                ++state.Index;
+            }
+        }
+
+        private void ConsumeToken(State state) // consumes a bare term, detecting trailing '*' (prefix) and '~' (fuzzy) operators
+        {
+            int copied = 0;
+            bool escaped = false;
+            bool prefix = false;
+            bool fuzzy = false;
+
+            while (state.Index < state.Length)
+            {
+                if (!escaped)
+                {
+                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
+                    {
+                        // an escape character has been found so
+                        // whatever character is next will become
+                        // part of the term unless the escape
+                        // character is the last one in the data
+                        escaped = true;
+                        prefix = false;
+                        ++state.Index;
+
+                        continue;
+                    }
+                    else if (TokenFinished(state))
+                    {
+                        // this should be the end of the term
+                        // all characters found will be used for
+                        // creating the term query
+                        break;
+                    }
+                    else if (copied > 0 && state.Data[state.Index] == '~' && (flags & FUZZY_OPERATOR) != 0)
+                    {
+                        fuzzy = true;
+                        break;
+                    }
+
+                    // prefix tracks whether or not the last character
+                    // was a '*' operator that hasn't been escaped
+                    // there must be at least one valid character before
+                    // searching for a prefixed set of terms
+                    prefix = copied > 0 && state.Data[state.Index] == '*' && (flags & PREFIX_OPERATOR) != 0;
+                }
+
+                escaped = false;
+                state.Buffer[copied++] = state.Data[state.Index++];
+            }
+
+            if (copied > 0)
+            {
+                Query branch;
+
+                if (fuzzy && (flags & FUZZY_OPERATOR) != 0)
+                {
+                    string token = new string(state.Buffer, 0, copied);
+                    int fuzziness = ParseFuzziness(state);
+                    // edit distance has a maximum, limit to the maximum supported
+                    fuzziness = Math.Min(fuzziness, LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
+                    if (fuzziness == 0)
+                    {
+                        branch = NewDefaultQuery(token);
+                    }
+                    else
+                    {
+                        branch = NewFuzzyQuery(token, fuzziness);
+                    }
+                }
+                else if (prefix)
+                {
+                    // if a term is found with a closing '*' it is considered to be a prefix query
+                    // and will have prefix added as an option
+                    string token = new string(state.Buffer, 0, copied - 1);
+                    branch = NewPrefixQuery(token);
+                }
+                else
+                {
+                    // a standard term has been found so it will be run through
+                    // the entire analysis chain from the specified schema field
+                    string token = new string(state.Buffer, 0, copied);
+                    branch = NewDefaultQuery(token);
+                }
+
+                BuildQueryTree(state, branch);
+            }
+        }
+
+        /// <summary>
+        /// BuildQueryTree should be called after a term, phrase, or subquery
+        /// is consumed to be added to our existing query tree
+        /// this method will only add to the existing tree if the branch contained in state is not null
+        /// </summary>
+        /// <param name="state">current parsing state; its Top, Not, and operation fields are updated</param>
+        /// <param name="branch">the just-consumed term, phrase, or subquery; ignored when null</param>
+        private void BuildQueryTree(State state, Query branch)
+        {
+            if (branch != null)
+            {
+                // modify our branch to a BooleanQuery wrapper for not
+                // this is necessary any time a term, phrase, or subquery is negated
+                if (state.Not % 2 == 1)
+                {
+                    BooleanQuery nq = new BooleanQuery();
+                    nq.Add(branch, BooleanClause.Occur.MUST_NOT);
+                    nq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
+                    branch = nq;
+                }
+
+                // first term (or phrase or subquery) found and will begin our query tree
+                if (state.Top == null)
+                {
+                    state.Top = branch;
+                }
+                else
+                {
+                    // more than one term (or phrase or subquery) found
+                    // set currentOperation to the default if no other operation is explicitly set
+                    if (!state.CurrentOperationIsSet)
+                    {
+                        state.CurrentOperation = defaultOperator;
+                    }
+
+                    // operational change requiring a new parent node
+                    // this occurs if the previous operation is not the same as current operation
+                    // because the previous operation must be evaluated separately to preserve
+                    // the proper precedence and the current operation will take over as the top of the tree
+                    if (!state.PreviousOperationIsSet || state.PreviousOperation != state.CurrentOperation)
+                    {
+                        BooleanQuery bq = new BooleanQuery();
+                        bq.Add(state.Top, state.CurrentOperation);
+                        state.Top = bq;
+                    }
+
+                    // add the branch and remember the operation for the next comparison
+                    ((BooleanQuery)state.Top).Add(branch, state.CurrentOperation);
+                    state.PreviousOperation = state.CurrentOperation;
+                }
+
+                // reset the current operation as it was intended to be applied to
+                // the incoming term (or phrase or subquery); NOTE(review): unlike
+                // the upstream comment implied, this reset only runs when branch is non-null
+                state.CurrentOperationIsSet = false;
+            }
+        }
+
+        /// <summary>
+        /// Helper parsing fuzziness from parsing state
+        /// </summary>
+        /// <param name="state">current parsing state, positioned at a possible '~' character</param>
+        /// <returns>slop/edit distance, or 0 when the slop/edit string cannot be parsed</returns>
+        private int ParseFuzziness(State state)
+        {
+            char[] slopText = new char[state.Length];
+            int slopLength = 0;
+
+            if (state.Data[state.Index] == '~')
+            {
+                while (state.Index < state.Length)
+                {
+                    state.Index++;
+                    // it's possible that the ~ was at the end, so check after incrementing
+                    // to make sure we don't go out of bounds
+                    if (state.Index < state.Length)
+                    {
+                        if (TokenFinished(state))
+                        {
+                            break;
+                        }
+                        slopText[slopLength] = state.Data[state.Index];
+                        slopLength++;
+                    }
+                }
+                int fuzziness = 0;
+                int.TryParse(new string(slopText, 0, slopLength), out fuzziness); // leaves fuzziness at 0 on failure; NOTE(review): uses current culture — confirm invariant culture is not needed
+                // negative -> 0
+                if (fuzziness < 0)
+                {
+                    fuzziness = 0;
+                }
+                return fuzziness;
+            }
+            return 0;
+        }
+
+        /// <summary>
+        /// Helper returning true if the state has reached the end of token.
+        /// </summary>
+        /// <param name="state">current parsing state; only the character at state.Index is inspected</param>
+        /// <returns>true if the current character is an enabled operator or whitespace that terminates the token</returns>
+        private bool TokenFinished(State state)
+        {
+            if ((state.Data[state.Index] == '"' && (flags & PHRASE_OPERATOR) != 0)
+                || (state.Data[state.Index] == '|' && (flags & OR_OPERATOR) != 0)
+                || (state.Data[state.Index] == '+' && (flags & AND_OPERATOR) != 0)
+                || (state.Data[state.Index] == '(' && (flags & PRECEDENCE_OPERATORS) != 0)
+                || (state.Data[state.Index] == ')' && (flags & PRECEDENCE_OPERATORS) != 0)
+                || ((state.Data[state.Index] == ' '
+                || state.Data[state.Index] == '\t'
+                || state.Data[state.Index] == '\n'
+                || state.Data[state.Index] == '\r') && (flags & WHITESPACE_OPERATOR) != 0))
+            {
+                return true;
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Factory method to generate a standard query (no phrase or prefix operators).
+        /// </summary>
+        /// <param name="text">term text queried against every weighted field</param>
+        /// <returns>a simplified disjunction over all fields, or null when no field produces a query</returns>
+        protected virtual Query NewDefaultQuery(string text)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            foreach (var entry in weights)
+            {
+                Query q = CreateBooleanQuery(entry.Key, text, defaultOperator);
+                if (q != null)
+                {
+                    q.Boost = entry.Value;
+                    bq.Add(q, BooleanClause.Occur.SHOULD);
+                }
+            }
+            return Simplify(bq);
+        }
+
+        /// <summary>
+        /// Factory method to generate a fuzzy query.
+        /// </summary>
+        /// <param name="text">term text queried against every weighted field</param>
+        /// <param name="fuzziness">maximum edit distance for the fuzzy match</param>
+        /// <returns>a simplified disjunction of FuzzyQuery over all fields</returns>
+        protected virtual Query NewFuzzyQuery(string text, int fuzziness)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            foreach (var entry in weights)
+            {
+                Query q = new FuzzyQuery(new Term(entry.Key, text), fuzziness);
+                if (q != null) // always true for a freshly constructed query; kept for symmetry with the other factories
+                {
+                    q.Boost = entry.Value;
+                    bq.Add(q, BooleanClause.Occur.SHOULD);
+                }
+            }
+            return Simplify(bq);
+        }
+
+        /// <summary>
+        /// Factory method to generate a phrase query with slop.
+        /// </summary>
+        /// <param name="text">phrase text analyzed against every weighted field</param>
+        /// <param name="slop">phrase slop (allowed positional distance between terms)</param>
+        /// <returns>a simplified disjunction over all fields, or null when no field produces a query</returns>
+        protected virtual Query NewPhraseQuery(string text, int slop)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            foreach (var entry in weights)
+            {
+                Query q = CreatePhraseQuery(entry.Key, text, slop);
+                if (q != null)
+                {
+                    q.Boost = entry.Value;
+                    bq.Add(q, BooleanClause.Occur.SHOULD);
+                }
+            }
+            return Simplify(bq);
+        }
+
+        /// <summary>
+        /// Factory method to generate a prefix query.
+        /// </summary>
+        /// <param name="text">prefix text (without the trailing '*') queried against every weighted field</param>
+        /// <returns>a simplified disjunction of PrefixQuery over all fields</returns>
+        protected virtual Query NewPrefixQuery(string text)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            foreach (var entry in weights)
+            {
+                PrefixQuery prefix = new PrefixQuery(new Term(entry.Key, text));
+                prefix.Boost = entry.Value;
+                bq.Add(prefix, BooleanClause.Occur.SHOULD);
+            }
+            return Simplify(bq);
+        }
+
+        /// <summary>
+        /// Helper to simplify boolean queries with 0 or 1 clause
+        /// </summary>
+        /// <param name="bq">the boolean query to simplify</param>
+        /// <returns>null for 0 clauses, the lone inner query for 1 clause, otherwise bq unchanged</returns>
+        protected virtual Query Simplify(BooleanQuery bq)
+        {
+            if (!bq.Clauses.Any())
+            {
+                return null;
+            }
+            else if (bq.Clauses.Length == 1)
+            {
+                return bq.Clauses[0].Query;
+            }
+            else
+            {
+                return bq;
+            }
+        }
+
+        /// <summary>
+        /// Gets or Sets the implicit operator setting, which will be
+        /// either <see cref="BooleanClause.Occur.SHOULD"/> or <see cref="BooleanClause.Occur.MUST"/>.
+        /// </summary>
+        public virtual BooleanClause.Occur DefaultOperator
+        {
+            get { return defaultOperator; }
+            set { defaultOperator = value; }
+        }
+
+
+        public class State // mutable parsing state threaded through the recursive consume methods
+        {
+            //private readonly char[] data;   // the characters in the query string
+            //private readonly char[] buffer; // a temporary buffer used to reduce necessary allocations
+            //private int index;
+            //private int length;
+
+            private BooleanClause.Occur currentOperation;
+            private BooleanClause.Occur previousOperation;
+            //private int not;
+
+            //private Query top;
+
+            internal State(char[] data, char[] buffer, int index, int length)
+            {
+                this.Data = data;
+                this.Buffer = buffer;
+                this.Index = index;
+                this.Length = length;
+            }
+
+            public char[] Data { get; protected set; } // the characters in the query string
+            public char[] Buffer { get; protected set; } // a temporary buffer used to reduce necessary allocations
+            public int Index { get; set; }
+            public int Length { get; protected set; }
+
+            public BooleanClause.Occur CurrentOperation 
+            {
+                get 
+                { 
+                    return currentOperation; 
+                }
+                set
+                {
+                    currentOperation = value;
+                    CurrentOperationIsSet = true;
+                }
+            }
+
+            public BooleanClause.Occur PreviousOperation
+            {
+                get
+                {
+                    return previousOperation;
+                }
+                set
+                {
+                    previousOperation = value;
+                    PreviousOperationIsSet = true;
+                }
+            }
+
+            public bool CurrentOperationIsSet { get; set; } // true once CurrentOperation has been explicitly assigned
+            public bool PreviousOperationIsSet { get; set; } // true once PreviousOperation has been explicitly assigned
+
+            public int Not { get; set; } // count of consecutive negations; an odd value negates the next branch
+            public Query Top { get; set; } // root of the query tree built so far (null until the first branch)
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs b/src/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
new file mode 100644
index 0000000..bfb2fc2
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
@@ -0,0 +1,134 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+
+    /// <summary> 
+    /// This interface describes a character stream that maintains line and
+    /// column number positions of the characters.  It also has the capability
+    /// to backup the stream to some extent.  An implementation of this
+    /// interface is used in the TokenManager implementation generated by
+    /// JavaCCParser.
+    /// 
+    /// All the methods except backup can be implemented in any fashion. backup
+    /// needs to be implemented correctly for the correct operation of the lexer.
+    /// Rest of the methods are all used to get information like line number,
+    /// column number and the String that constitutes a token and are not used
+    /// by the lexer. Hence their implementation won't affect the generated lexer's
+    /// operation.
+    /// </summary>
+    public interface ICharStream
+    {
+        /// <summary> 
+        /// Returns the next character from the selected input.  The method
+        /// of selecting the input is the responsibility of the class
+        /// implementing this interface.  May throw a System.IO.IOException.
+        /// </summary>
+        char ReadChar();
+
+        /// <summary>
+        /// Returns the column position of the character last read.
+        /// </summary>
+        /// <deprecated>
+        /// </deprecated>
+        /// <seealso cref="EndColumn">
+        /// </seealso>
+        [Obsolete]
+        int Column { get; }
+
+        /// <summary>
+        /// Returns the line number of the character last read.
+        /// </summary>
+        /// <deprecated>
+        /// </deprecated>
+        /// <seealso cref="EndLine">
+        /// </seealso>
+        [Obsolete]
+        int Line { get; }
+
+        /// <summary>
+        /// Returns the column number of the last character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int EndColumn { get; }
+
+        /// <summary> 
+        /// Returns the line number of the last character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int EndLine { get; }
+
+        /// <summary> 
+        /// Returns the column number of the first character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int BeginColumn { get; }
+
+        /// <summary> 
+        /// Returns the line number of the first character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int BeginLine { get; }
+
+        /// <summary> 
+        /// Backs up the input stream by amount steps. Lexer calls this method if it
+        /// had already read some characters, but could not use them to match a
+        /// (longer) token. So, they will be used again as the prefix of the next
+        /// token and it is the implementation's responsibility to do this right.
+        /// </summary>
+        void Backup(int amount);
+
+        /// <summary> 
+        /// Returns the next character that marks the beginning of the next token.
+        /// All characters must remain in the buffer between two successive calls
+        /// to this method to implement backup correctly.
+        /// </summary>
+        char BeginToken();
+
+        /// <summary> 
+        /// Returns a string made up of characters from the marked token beginning
+        /// to the current buffer position. Implementations have the choice of returning
+        /// anything that they want to. For example, for efficiency, one might decide
+        /// to just return null, which is a valid implementation.
+        /// </summary>
+        string Image { get; }
+
+        /// <summary> 
+        /// Returns an array of characters that make up the suffix of length 'len' for
+        /// the currently matched token. This is used to build up the matched string
+        /// for use in actions in the case of MORE. A simple and inefficient
+        /// implementation of this is as follows :
+        /// 
+        /// {
+        /// string t = Image;
+        /// return t.Substring(t.Length - len, len).ToCharArray();
+        /// }
+        /// </summary>
+        char[] GetSuffix(int len);
+
+        /// <summary> 
+        /// The lexer calls this function to indicate that it is done with the stream
+        /// and hence implementations can free any resources held by this class.
+        /// Again, the body of this function can be just empty and it will not
+        /// affect the lexer's operation.
+        /// </summary>
+        void Done();
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs b/src/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
new file mode 100644
index 0000000..b33bd83
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
@@ -0,0 +1,158 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// An efficient implementation of JavaCC's CharStream interface.  <p/>Note that
+    /// this does not do line-number counting, but instead keeps track of the
+    /// character position of the token in the input, as required by Lucene's <see cref="Lucene.Net.Analysis.Token" />
+    /// API.
+    /// </summary>
+    public sealed class FastCharStream : ICharStream
+    {
+        internal char[] buffer = null; // lazily allocated on first Refill
+
+        internal int bufferLength = 0; // end of valid chars
+        internal int bufferPosition = 0; // next char to read
+
+        internal int tokenStart = 0; // offset in buffer
+        internal int bufferStart = 0; // position in file of buffer
+
+        internal System.IO.TextReader input; // source of chars
+
+        /// <summary>
+        /// Constructs from a Reader. 
+        /// </summary>
+        public FastCharStream(System.IO.TextReader r)
+        {
+            input = r;
+        }
+
+        public char ReadChar() // refills on demand; Refill throws IOException at end of input
+        {
+            if (bufferPosition >= bufferLength)
+                Refill();
+            return buffer[bufferPosition++];
+        }
+
+        private void Refill() // shifts the current token to the buffer front (or grows the buffer) and reads more chars
+        {
+            int newPosition = bufferLength - tokenStart;
+
+            if (tokenStart == 0)
+            {
+                // token won't fit in buffer
+                if (buffer == null)
+                {
+                    // first time: alloc buffer
+                    buffer = new char[2048];
+                }
+                else if (bufferLength == buffer.Length)
+                {
+                    // grow buffer
+                    char[] newBuffer = new char[buffer.Length * 2];
+                    Array.Copy(buffer, 0, newBuffer, 0, bufferLength);
+                    buffer = newBuffer;
+                }
+            }
+            else
+            {
+                // shift token to front
+                Array.Copy(buffer, tokenStart, buffer, 0, newPosition);
+            }
+
+            bufferLength = newPosition; // update state
+            bufferPosition = newPosition;
+            bufferStart += tokenStart;
+            tokenStart = 0;
+
+            int charsRead = input.Read(buffer, newPosition, buffer.Length - newPosition);
+            if (charsRead <= 0)
+                throw new System.IO.IOException("read past eof");
+            else
+                bufferLength += charsRead;
+        }
+
+        public char BeginToken() // marks the start of a new token, then reads its first char
+        {
+            tokenStart = bufferPosition;
+            return ReadChar();
+        }
+
+        public void Backup(int amount) // un-reads chars; valid because the token's chars stay buffered
+        {
+            bufferPosition -= amount;
+        }
+
+        public string Image
+        {
+            get { return new System.String(buffer, tokenStart, bufferPosition - tokenStart); }
+        }
+
+        public char[] GetSuffix(int len) // last len chars of the current token
+        {
+            char[] value_Renamed = new char[len];
+            Array.Copy(buffer, bufferPosition - len, value_Renamed, 0, len);
+            return value_Renamed;
+        }
+
+        public void Done() // closes the underlying reader; close failures are logged and ignored
+        {
+            try
+            {
+                input.Close();
+            }
+            catch (System.IO.IOException e)
+            {
+                System.Console.Error.WriteLine("Caught: " + e + "; ignoring.");
+            }
+        }
+
+        public int Column
+        {
+            get { return bufferStart + bufferPosition; }
+        }
+
+        public int Line // line numbers are not tracked; always 1
+        {
+            get { return 1; }
+        }
+
+        public int EndColumn
+        {
+            get { return bufferStart + bufferPosition; }
+        }
+
+        public int EndLine // line numbers are not tracked; always 1
+        {
+            get { return 1; }
+        }
+
+        public int BeginColumn
+        {
+            get { return bufferStart + tokenStart; }
+        }
+
+        public int BeginLine // line numbers are not tracked; always 1
+        {
+            get { return 1; }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs b/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
new file mode 100644
index 0000000..1716658
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
@@ -0,0 +1,234 @@
+\ufeffusing System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary> 
+    /// This exception is thrown when parse errors are encountered.
+    /// You can explicitly create objects of this exception type by
+    /// calling the method GenerateParseException in the generated
+    /// parser.
+    /// 
+    /// You can modify this class to customize your error reporting
+    /// mechanisms so long as you retain the public fields.
+    /// </summary>
+    [Serializable]
+    public class ParseException : Exception
+    {
+        /// <summary>
+        /// This constructor is used by the method "GenerateParseException"
+        /// in the generated parser.  Calling this constructor generates
+        /// a new object of this type with the fields "currentToken",
+        /// "expectedTokenSequences", and "tokenImage" set.
+        /// </summary>
+        /// <param name="currentTokenVal">the last token consumed before the error</param>
+        /// <param name="expectedTokenSequencesVal">token-kind sequences that would have been legal next</param>
+        /// <param name="tokenImageVal">string images of all token kinds, indexed by token ordinal</param>
+        public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        string[] tokenImageVal)
+            : base(Initialize(currentTokenVal, expectedTokenSequencesVal, tokenImageVal))
+        {
+            currentToken = currentTokenVal;
+            expectedTokenSequences = expectedTokenSequencesVal;
+            tokenImage = tokenImageVal;
+        }
+
+        /**
+         * The following constructors are for use by you for whatever
+         * purpose you can think of.  Constructing the exception in this
+         * manner makes the exception behave in the normal way - i.e., as
+         * documented in the base Exception class.  The fields "currentToken",
+         * "expectedTokenSequences", and "tokenImage" do not contain
+         * relevant information.  The JavaCC generated code does not use
+         * these constructors.
+         */
+
+        public ParseException()
+        { }
+
+        public ParseException(string message)
+            : base(message)
+        { }
+
+        public ParseException(string message, Exception innerException)
+            : base(message, innerException)
+        { }
+
+
+        /// <summary> 
+        /// This is the last token that has been consumed successfully.  If
+        /// this object has been created due to a parse error, the token
+        /// following this token will (therefore) be the first error token.
+        /// </summary>
+        public Token currentToken;
+
+        /// <summary> 
+        /// Each entry in this array is an array of integers.  Each array
+        /// of integers represents a sequence of tokens (by their ordinal
+        /// values) that is expected at this point of the parse.
+        /// </summary>
+        public int[][] expectedTokenSequences;
+
+        /// <summary> 
+        /// This is a reference to the "tokenImage" array of the generated
+        /// parser within which the parse error occurred.  This array is
+        /// defined in the generated ...Constants interface.
+        /// </summary>
+        public string[] tokenImage;
+
+
+        /// <summary>
+        /// It uses "currentToken" and "expectedTokenSequences" to generate a parse
+        /// error message and returns it.  If this object has been created
+        /// due to a parse error, and you do not catch it (it gets thrown
+        /// from the parser) the correct error message
+        /// gets displayed.
+        /// </summary>
+        /// <param name="currentToken">the last successfully consumed token; the tokens following it are the error tokens</param>
+        /// <param name="expectedTokenSequences">expected token-kind sequences at the point of failure</param>
+        /// <param name="tokenImage">string images of all token kinds, indexed by token ordinal</param>
+        /// <returns>a human-readable message listing what was encountered and what was expected</returns>
+        private static string Initialize(Token currentToken,
+            int[][] expectedTokenSequences,
+            string[] tokenImage)
+        {
+
+            StringBuilder expected = new StringBuilder();
+            // maxSize tracks the longest expected sequence; it bounds how many
+            // error tokens are echoed back in the "Encountered" part below.
+            int maxSize = 0;
+            for (int i = 0; i < expectedTokenSequences.Length; i++)
+            {
+                if (maxSize < expectedTokenSequences[i].Length)
+                {
+                    maxSize = expectedTokenSequences[i].Length;
+                }
+                for (int j = 0; j < expectedTokenSequences[i].Length; j++)
+                {
+                    expected.Append(tokenImage[expectedTokenSequences[i][j]]).Append(' ');
+                }
+                // A sequence not ending in token kind 0 (EOF) is a prefix; mark it.
+                if (expectedTokenSequences[i][expectedTokenSequences[i].Length - 1] != 0)
+                {
+                    expected.Append("...");
+                }
+                expected.Append(eol).Append("    ");
+            }
+            string retval = "Encountered \"";
+            Token tok = currentToken.next;
+            for (int i = 0; i < maxSize; i++)
+            {
+                if (i != 0)
+                    retval += " ";
+                if (tok.kind == 0)
+                {
+                    // Token kind 0 is EOF; stop echoing error tokens.
+                    retval += tokenImage[0];
+                    break;
+                }
+                retval += (" " + tokenImage[tok.kind]);
+                retval += " \"";
+                retval += Add_escapes(tok.image);
+                retval += " \"";
+                tok = tok.next;
+            }
+            retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
+            retval += ("." + eol);
+            if (expectedTokenSequences.Length == 1)
+            {
+                retval += ("Was expecting:" + eol + "    ");
+            }
+            else
+            {
+                retval += ("Was expecting one of:" + eol + "    ");
+            }
+            retval += expected.ToString();
+            return retval;
+        }
+
+        /// <summary> 
+        /// The end of line string for this machine.
+        /// </summary>
+        protected static string eol = Environment.NewLine;
+
+        /// <summary> 
+        /// Used to convert raw characters to their escaped versions
+        /// when these raw versions cannot be used as part of an ASCII
+        /// string literal.
+        /// </summary>
+        internal static string Add_escapes(string str)
+        {
+            StringBuilder retval = new StringBuilder();
+            char ch;
+            for (int i = 0; i < str.Length; i++)
+            {
+                switch (str[i])
+                {
+
+                    case (char)(0):
+                        // NUL characters are dropped entirely.
+                        continue;
+
+                    case '\b':
+                        retval.Append("\\b");
+                        continue;
+
+                    case '\t':
+                        retval.Append("\\t");
+                        continue;
+
+                    case '\n':
+                        retval.Append("\\n");
+                        continue;
+
+                    case '\f':
+                        retval.Append("\\f");
+                        continue;
+
+                    case '\r':
+                        retval.Append("\\r");
+                        continue;
+
+                    case '\"':
+                        retval.Append("\\\"");
+                        continue;
+
+                    case '\'':
+                        retval.Append("\\\'");
+                        continue;
+
+                    case '\\':
+                        retval.Append("\\\\");
+                        continue;
+
+                    default:
+                        // Anything outside printable ASCII becomes a \uXXXX escape,
+                        // zero-padded to four hex digits.
+                        if ((ch = str[i]) < 0x20 || ch > 0x7e)
+                        {
+                            System.String s = "0000" + System.Convert.ToString(ch, 16);
+                            retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
+                        }
+                        else
+                        {
+                            retval.Append(ch);
+                        }
+                        continue;
+
+                }
+            }
+            return retval.ToString();
+        }
+    }
+}
\ No newline at end of file


[37/50] [abbrv] lucenenet git commit: Fixed accessibility of classes and members to match that of Java.

Posted by sy...@apache.org.
Fixed accessibility of classes and members to match that of Java.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/387d985b
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/387d985b
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/387d985b

Branch: refs/heads/master
Commit: 387d985b71edd4e7962a9fdbe6124d7cab3262f0
Parents: 679ad24
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Tue Aug 2 18:27:12 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:55 2016 +0700

----------------------------------------------------------------------
 .../Analyzing/AnalyzingQueryParser.cs           |  2 +-
 .../Classic/QueryParser.cs                      |  8 ++--
 .../Classic/QueryParserBase.cs                  | 44 ++++++++++----------
 .../Classic/QueryParserTokenManager.cs          | 16 +++----
 .../Classic/TokenMgrError.cs                    |  8 ++--
 .../ComplexPhrase/ComplexPhraseQueryParser.cs   |  2 +-
 .../Ext/ExtendableQueryParser.cs                |  2 +-
 .../Ext/ExtensionQuery.cs                       |  6 +--
 src/Lucene.Net.QueryParser/Ext/Extensions.cs    |  8 ++--
 .../Surround/Parser/QueryParserTokenManager.cs  |  3 +-
 .../Surround/Parser/TokenMgrError.cs            |  8 ++--
 .../Surround/Query/BasicQueryFactory.cs         |  8 ++--
 .../Surround/Query/ComposedQuery.cs             |  2 +-
 .../Surround/Query/DistanceQuery.cs             |  2 +-
 .../Surround/Query/FieldsQuery.cs               |  2 +-
 .../Surround/Query/RewriteQuery.cs              |  2 +-
 .../Surround/Query/SimpleTerm.cs                |  4 +-
 .../Surround/Query/SimpleTermRewriteQuery.cs    |  2 +-
 .../Surround/Query/SpanNearClauseFactory.cs     |  2 +-
 .../Surround/Query/SrndBooleanQuery.cs          |  2 +-
 .../Surround/Query/SrndQuery.cs                 |  1 -
 .../Classic/TestQueryParser.cs                  |  2 +-
 22 files changed, 67 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
index 8930aa4..d61b9d0 100644
--- a/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
@@ -138,7 +138,7 @@ namespace Lucene.Net.QueryParser.Analyzing
         /// <param name="chunk">The portion of the given termStr to be analyzed</param>
         /// <returns>The result of analyzing the given chunk</returns>
         /// <exception cref="ParseException">ParseException when analysis returns other than one output token</exception>
-        protected internal string AnalyzeSingleChunk(string field, string termStr, string chunk)
+        protected internal virtual string AnalyzeSingleChunk(string field, string termStr, string chunk)
         {
             string analyzed = null;
             TokenStream stream = null;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
index e86c716..f52b4f7 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
@@ -191,7 +191,7 @@ namespace Lucene.Net.QueryParser.Classic
         }
 
         // This makes sure that there is no garbage after the query string
-        public override Query TopLevelQuery(string field)
+        public override sealed Query TopLevelQuery(string field)
         {
             Query q;
             q = Query(field);
@@ -322,7 +322,7 @@ namespace Lucene.Net.QueryParser.Classic
             throw new Exception("Missing return statement in function");
         }
 
-        public Query Term(String field)
+        public Query Term(string field)
         {
             Token term, boost = null, fuzzySlop = null, goop1, goop2;
             bool prefix = false;
@@ -618,7 +618,7 @@ namespace Lucene.Net.QueryParser.Classic
             };
         }
 
-        private JJCalls[] jj_2_rtns = new JJCalls[1];
+        private readonly JJCalls[] jj_2_rtns = new JJCalls[1];
         private bool jj_rescan = false;
         private int jj_gc = 0;
 
@@ -656,7 +656,7 @@ namespace Lucene.Net.QueryParser.Classic
         }
 
         /// <summary>Reinitialise. </summary>
-        public void ReInit(QueryParserTokenManager tm)
+        public virtual void ReInit(QueryParserTokenManager tm)
         {
             token_source = tm;
             token = new Token();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
index 599110e..3e80f4a 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
@@ -32,7 +32,7 @@ namespace Lucene.Net.QueryParser.Classic
      * limitations under the License.
      */
 
-    public abstract partial class QueryParserBase : QueryBuilder, ICommonQueryParserConfiguration
+    public abstract class QueryParserBase : QueryBuilder, ICommonQueryParserConfiguration
     {
         /// <summary>
         /// Do not catch this exception in your code, it means you are using methods that you should no longer use.
@@ -138,7 +138,7 @@ namespace Lucene.Net.QueryParser.Classic
         /// <param name="matchVersion">Lucene version to match.</param>
         /// <param name="f">the default field for query terms.</param>
         /// <param name="a">used to find terms in the query text.</param>
-        public void Init(LuceneVersion matchVersion, string f, Analyzer a)
+        public virtual void Init(LuceneVersion matchVersion, string f, Analyzer a)
         {
             Analyzer = a;
             field = f;
@@ -191,7 +191,7 @@ namespace Lucene.Net.QueryParser.Classic
         /// <summary>
         /// Returns the default field.
         /// </summary>
-        public string Field
+        public virtual string Field
         {
             get { return field; }
         }
@@ -211,20 +211,20 @@ namespace Lucene.Net.QueryParser.Classic
         /// Get or Set the minimum similarity for fuzzy queries.
         /// Default is 2f.
         /// </summary>
-        public float FuzzyMinSim { get; set; }
+        public virtual float FuzzyMinSim { get; set; }
 
         /// <summary>
         /// Get or Set the prefix length for fuzzy queries. 
         /// Default is 0.
         /// </summary>
-        public int FuzzyPrefixLength { get; set; }
+        public virtual int FuzzyPrefixLength { get; set; }
 
         /// <summary>
         /// Gets or Sets the default slop for phrases. 
         /// If zero, then exact phrase matches are required. 
         /// Default value is zero.
         /// </summary>
-        public int PhraseSlop { get; set; }
+        public virtual int PhraseSlop { get; set; }
 
         /// <summary>
         /// Set to <code>true</code> to allow leading wildcard characters.
@@ -236,7 +236,7 @@ namespace Lucene.Net.QueryParser.Classic
         /// <p>
         /// Default: false.
         /// </summary>
-        public bool AllowLeadingWildcard { get; set; }
+        public virtual bool AllowLeadingWildcard { get; set; }
 
         /// <summary>
         /// Gets or Sets the boolean operator of the QueryParser.
@@ -246,13 +246,13 @@ namespace Lucene.Net.QueryParser.Classic
         /// In <code>AND_OPERATOR</code> mode terms are considered to be in conjunction: the
         /// above mentioned query is parsed as <code>capital AND of AND Hungary
         /// </summary>
-        public Operator DefaultOperator { get; set; }
+        public virtual Operator DefaultOperator { get; set; }
 
         /// <summary>
         /// Whether terms of wildcard, prefix, fuzzy and range queries are to be automatically
         //  lower-cased or not.  Default is <code>true</code>.
         /// </summary>
-        public bool LowercaseExpandedTerms { get; set; }
+        public virtual bool LowercaseExpandedTerms { get; set; }
 
         /// <summary>
         /// By default QueryParser uses <see cref="MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT"/>
@@ -264,22 +264,22 @@ namespace Lucene.Net.QueryParser.Classic
         /// points are not relevant then use this to change
         /// the rewrite method.
         /// </summary>
-        public MultiTermQuery.RewriteMethod MultiTermRewriteMethod { get; set; }
+        public virtual MultiTermQuery.RewriteMethod MultiTermRewriteMethod { get; set; }
 
         /// <summary>
         /// Get or Set locale used by date range parsing, lowercasing, and other
         /// locale-sensitive operations.
         /// </summary>
-        public CultureInfo Locale { get; set; }
+        public virtual CultureInfo Locale { get; set; }
 
-        public TimeZoneInfo TimeZone { get; set; }
+        public virtual TimeZoneInfo TimeZone { get; set; }
 
         /// <summary>
         /// Gets or Sets the default date resolution used by RangeQueries for fields for which no
         /// specific date resolutions has been set. Field specific resolutions can be set
         /// with <see cref="SetDateResolution(string,DateTools.Resolution)"/>.
         /// </summary>
-        public void SetDateResolution(DateTools.Resolution dateResolution)
+        public virtual void SetDateResolution(DateTools.Resolution dateResolution)
         {
             this.dateResolution = dateResolution;
         }
@@ -289,7 +289,7 @@ namespace Lucene.Net.QueryParser.Classic
         /// </summary>
         /// <param name="fieldName">field for which the date resolution is to be set</param>
         /// <param name="dateResolution">date resolution to set</param>
-        public void SetDateResolution(string fieldName, DateTools.Resolution dateResolution)
+        public virtual void SetDateResolution(string fieldName, DateTools.Resolution dateResolution)
         {
             if (string.IsNullOrEmpty(fieldName))
             {
@@ -312,7 +312,7 @@ namespace Lucene.Net.QueryParser.Classic
         /// </summary>
         /// <param name="fieldName"></param>
         /// <returns></returns>
-        public DateTools.Resolution GetDateResolution(string fieldName)
+        public virtual DateTools.Resolution GetDateResolution(string fieldName)
         {
             if (string.IsNullOrEmpty(fieldName))
             {
@@ -339,7 +339,7 @@ namespace Lucene.Net.QueryParser.Classic
         /// For example, setting this to true can enable analyzing terms into 
         /// collation keys for locale-sensitive <see cref="TermRangeQuery"/>.
         /// </summary>
-        public bool AnalyzeRangeTerms { get; set; }
+        public virtual bool AnalyzeRangeTerms { get; set; }
 
         protected internal virtual void AddClause(IList<BooleanClause> clauses, int conj, int mods, Query q)
         {
@@ -548,7 +548,7 @@ namespace Lucene.Net.QueryParser.Classic
             return new FuzzyQuery(term, numEdits, prefixLength);
         }
 
-        // LUCENE TODO: Should this be protected instead?
+        // LUCENETODO: Should this be protected instead?
         private BytesRef AnalyzeMultitermTerm(string field, string part)
         {
             return AnalyzeMultitermTerm(field, part, Analyzer);
@@ -802,7 +802,7 @@ namespace Lucene.Net.QueryParser.Classic
         }
 
         // extracted from the .jj grammar
-        protected internal virtual Query HandleBareTokenQuery(string qfield, Token term, Token fuzzySlop, bool prefix, bool wildcard, bool fuzzy, bool regexp)
+        internal virtual Query HandleBareTokenQuery(string qfield, Token term, Token fuzzySlop, bool prefix, bool wildcard, bool fuzzy, bool regexp)
         {
             Query q;
 
@@ -830,7 +830,7 @@ namespace Lucene.Net.QueryParser.Classic
             return q;
         }
 
-        protected internal virtual Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
+        internal virtual Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
         {
             Query q;
             float fms = FuzzyMinSim;
@@ -852,7 +852,7 @@ namespace Lucene.Net.QueryParser.Classic
         }
 
         // extracted from the .jj grammar
-        protected internal virtual Query HandleQuotedTerm(string qfield, Token term, Token fuzzySlop)
+        internal virtual Query HandleQuotedTerm(string qfield, Token term, Token fuzzySlop)
         {
             int s = PhraseSlop;  // default
             if (fuzzySlop != null)
@@ -867,7 +867,7 @@ namespace Lucene.Net.QueryParser.Classic
         }
 
         // extracted from the .jj grammar
-        protected internal virtual Query HandleBoost(Query q, Token boost)
+        internal virtual Query HandleBoost(Query q, Token boost)
         {
             if (boost != null)
             {
@@ -901,7 +901,7 @@ namespace Lucene.Net.QueryParser.Classic
         /// </summary>
         /// <param name="input"></param>
         /// <returns></returns>
-        protected internal virtual string DiscardEscapeChar(string input)
+        internal virtual string DiscardEscapeChar(string input)
         {
             // Create char array to hold unescaped char sequence
             char[] output = new char[input.Length];

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
index aac1505..ca7f2f4 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
@@ -1170,14 +1170,14 @@ namespace Lucene.Net.QueryParser.Classic
 		}
 		
 		/// <summary>Reinitialise parser. </summary>
-		public virtual void  ReInit(ICharStream stream)
+		public virtual void ReInit(ICharStream stream)
 		{
 			jjmatchedPos = jjnewStateCnt = 0;
 			curLexState = defaultLexState;
 			input_stream = stream;
 			ReInitRounds();
 		}
-		private void  ReInitRounds()
+		private void ReInitRounds()
 		{
 			int i;
 			jjround = 0x80000001;
@@ -1186,14 +1186,14 @@ namespace Lucene.Net.QueryParser.Classic
 		}
 		
 		/// <summary>Reinitialise parser. </summary>
-		public virtual void  ReInit(ICharStream stream, int lexState)
+		public virtual void ReInit(ICharStream stream, int lexState)
 		{
 			ReInit(stream);
 			SwitchTo(lexState);
 		}
 		
 		/// <summary>Switch to specified lex state. </summary>
-		public virtual void  SwitchTo(int lexState)
+		public virtual void SwitchTo(int lexState)
 		{
 			if (lexState >= 3 || lexState < 0)
 				throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
@@ -1322,7 +1322,7 @@ EOFLoop: ;
 			}
 		}
 		
-		private void  JjCheckNAdd(int state)
+		private void JjCheckNAdd(int state)
 		{
 			if (jjrounds[state] != jjround)
 			{
@@ -1330,7 +1330,7 @@ EOFLoop: ;
 				jjrounds[state] = jjround;
 			}
 		}
-		private void  JjAddStates(int start, int end)
+		private void JjAddStates(int start, int end)
 		{
 			do 
 			{
@@ -1338,13 +1338,13 @@ EOFLoop: ;
 			}
 			while (start++ != end);
 		}
-		private void  JjCheckNAddTwoStates(int state1, int state2)
+		private void JjCheckNAddTwoStates(int state1, int state2)
 		{
 			JjCheckNAdd(state1);
 			JjCheckNAdd(state2);
 		}
 		
-		private void  JjCheckNAddStates(int start, int end)
+		private void JjCheckNAddStates(int start, int end)
 		{
 			do 
 			{

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs b/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
index 2f69e13..a5ef460 100644
--- a/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
+++ b/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
@@ -29,16 +29,16 @@ namespace Lucene.Net.QueryParser.Classic
 		*/
 
         /// <summary> Lexical error occurred.</summary>
-        internal const int LEXICAL_ERROR = 0;
+        internal static readonly int LEXICAL_ERROR = 0;
 
         /// <summary> An attempt was made to create a second instance of a static token manager.</summary>
-        internal const int STATIC_LEXER_ERROR = 1;
+        internal static readonly int STATIC_LEXER_ERROR = 1;
 
         /// <summary> Tried to change to an invalid lexical state.</summary>
-        internal const int INVALID_LEXICAL_STATE = 2;
+        internal static readonly int INVALID_LEXICAL_STATE = 2;
 
         /// <summary> Detected (and bailed out of) an infinite loop in the token manager.</summary>
-        internal const int LOOP_DETECTED = 3;
+        internal static readonly int LOOP_DETECTED = 3;
 
         /// <summary> Indicates the reason why the exception is thrown. It will have
         /// one of the above 4 values.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
index 0ac7c5b..fb30753 100644
--- a/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
@@ -58,7 +58,7 @@ namespace Lucene.Net.QueryParser.ComplexPhrase
         /// exists in the documents as the same order as in query.
         /// Choose between ordered (true) or un-ordered (false) proximity search.
         /// </summary>
-        public bool InOrder { get; set; }
+        public virtual bool InOrder { get; internal set; }
 
         private ComplexPhraseQuery currentPhraseQuery = null;
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs b/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
index 6418f87..ee45f17 100644
--- a/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
@@ -109,7 +109,7 @@ namespace Lucene.Net.QueryParser.Ext
         /// Returns the extension field delimiter character.
         /// </summary>
         /// <returns>the extension field delimiter character.</returns>
-        public char ExtensionFieldDelimiter
+        public virtual char ExtensionFieldDelimiter
         {
             get { return extensions.ExtensionFieldDelimiter; }
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs b/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
index 610e4ad..fc86c2e 100644
--- a/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
+++ b/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
@@ -39,16 +39,16 @@
         /// <summary>
         /// Returns the query field
         /// </summary>
-        public string Field { get; protected set; }
+        public virtual string Field { get; protected set; }
 
         /// <summary>
         /// Returns the raw extension query string
         /// </summary>
-        public string RawQueryString { get; protected set; }
+        public virtual string RawQueryString { get; protected set; }
 
         /// <summary>
         /// Returns the top level parser which created this <see cref="ExtensionQuery"/>
         /// </summary>
-        public Classic.QueryParser TopLevelParser { get; protected set; }
+        public virtual Classic.QueryParser TopLevelParser { get; protected set; }
     }
 }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Ext/Extensions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Ext/Extensions.cs b/src/Lucene.Net.QueryParser/Ext/Extensions.cs
index 6895268..af6146c 100644
--- a/src/Lucene.Net.QueryParser/Ext/Extensions.cs
+++ b/src/Lucene.Net.QueryParser/Ext/Extensions.cs
@@ -94,7 +94,7 @@ namespace Lucene.Net.QueryParser.Ext
         /// <param name="field">the extension field string</param>
         /// <returns>a {<see cref="Tuple{String,String}"/> with the field name as the <see cref="Tuple{String,String}.Item1"/> and the
         /// extension key as the <see cref="Tuple{String,String}.Item2"/></returns>
-        public Tuple<string, string> SplitExtensionField(string defaultField, string field)
+        public virtual Tuple<string, string> SplitExtensionField(string defaultField, string field)
         {
             int indexOf = field.IndexOf(this.extensionFieldDelimiter);
             if (indexOf < 0)
@@ -111,7 +111,7 @@ namespace Lucene.Net.QueryParser.Ext
         /// <param name="extfield">the extension field identifier</param>
         /// <returns>the extension field identifier with all special chars escaped with
         /// a backslash character.</returns>
-        public string EscapeExtensionField(string extfield)
+        public virtual string EscapeExtensionField(string extfield)
         {
             return QueryParserBase.Escape(extfield);
         }
@@ -132,7 +132,7 @@ namespace Lucene.Net.QueryParser.Ext
         /// </summary>
         /// <param name="extensionKey">the extension key</param>
         /// <returns>escaped extension field identifier</returns>
-        public string BuildExtensionField(string extensionKey)
+        public virtual string BuildExtensionField(string extensionKey)
         {
             return BuildExtensionField(extensionKey, "");
         }
@@ -154,7 +154,7 @@ namespace Lucene.Net.QueryParser.Ext
         /// <param name="field">the field to apply the extension on.</param>
         /// <returns>escaped extension field identifier</returns>
         /// <remarks>See <see cref="M:BuildExtensionField(String)"/> to use the default query field</remarks>
-        public string BuildExtensionField(string extensionKey, string field)
+        public virtual string BuildExtensionField(string extensionKey, string field)
         {
             StringBuilder builder = new StringBuilder(field);
             builder.Append(this.extensionFieldDelimiter);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
index ac3d611..3e7b281 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
@@ -651,7 +651,7 @@ namespace Lucene.Net.QueryParser.Surround.Parser
 
         /// <summary>Get the next Token.</summary>
         [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
-        public Token GetNextToken()
+        public virtual Token GetNextToken()
         {
             Token matchedToken;
             int curPos = 0;
@@ -755,6 +755,5 @@ namespace Lucene.Net.QueryParser.Surround.Parser
                 JjCheckNAdd(jjnextStates[start]);
             } while (start++ != end);
         }
-
     }
 }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
index 2ccfc58..79120c7 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
@@ -29,16 +29,16 @@ namespace Lucene.Net.QueryParser.Surround.Parser
 		*/
 
         /// <summary> Lexical error occurred.</summary>
-        internal const int LEXICAL_ERROR = 0;
+        internal static readonly int LEXICAL_ERROR = 0;
 
         /// <summary> An attempt was made to create a second instance of a static token manager.</summary>
-        internal const int STATIC_LEXER_ERROR = 1;
+        internal static readonly int STATIC_LEXER_ERROR = 1;
 
         /// <summary> Tried to change to an invalid lexical state.</summary>
-        internal const int INVALID_LEXICAL_STATE = 2;
+        internal static readonly int INVALID_LEXICAL_STATE = 2;
 
         /// <summary> Detected (and bailed out of) an infinite loop in the token manager.</summary>
-        internal const int LOOP_DETECTED = 3;
+        internal static readonly int LOOP_DETECTED = 3;
 
         /// <summary> Indicates the reason why the exception is thrown. It will have
         /// one of the above 4 values.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
index 8992746..b0c6d36 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
@@ -52,8 +52,8 @@ namespace Lucene.Net.QueryParser.Surround.Query
         private int maxBasicQueries;
         private int queriesMade;
 
-        public int NrQueriesMade { get { return queriesMade; } }
-        public int MaxBasicQueries { get { return maxBasicQueries; } }
+        public virtual int NrQueriesMade { get { return queriesMade; } }
+        public virtual int MaxBasicQueries { get { return maxBasicQueries; } }
 
         public override string ToString()
         {
@@ -76,13 +76,13 @@ namespace Lucene.Net.QueryParser.Surround.Query
             queriesMade++;
         }
 
-        public TermQuery NewTermQuery(Term term)
+        public virtual TermQuery NewTermQuery(Term term)
         {
             CheckMax();
             return new TermQuery(term);
         }
 
-        public SpanTermQuery NewSpanTermQuery(Term term)
+        public virtual SpanTermQuery NewSpanTermQuery(Term term)
         {
             CheckMax();
             return new SpanTermQuery(term);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
index d421ad6..05010fd 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
@@ -57,7 +57,7 @@ namespace Lucene.Net.QueryParser.Surround.Query
         private bool operatorInfix;
         public virtual bool IsOperatorInfix { get { return operatorInfix; } } /* else prefix operator */
 
-        public IEnumerable<Search.Query> MakeLuceneSubQueriesField(string fn, BasicQueryFactory qf)
+        public virtual IEnumerable<Search.Query> MakeLuceneSubQueriesField(string fn, BasicQueryFactory qf)
         {
             List<Search.Query> luceneSubQueries = new List<Search.Query>();
             IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
index 1ca7a01..20ef47d 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
@@ -77,7 +77,7 @@ namespace Lucene.Net.QueryParser.Surround.Query
             sncf.AddSpanQuery(snq);
         }
 
-        public Search.Query GetSpanNearQuery(
+        public virtual Search.Query GetSpanNearQuery(
             IndexReader reader,
             String fieldName,
             float boost,

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
index 912bf36..228c7d6 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
@@ -52,7 +52,7 @@ namespace Lucene.Net.QueryParser.Surround.Query
             get { return false; }
         }
 
-        public Search.Query MakeLuceneQueryNoBoost(BasicQueryFactory qf)
+        public virtual Search.Query MakeLuceneQueryNoBoost(BasicQueryFactory qf)
         {
             if (fieldNames.Count() == 1)
             { /* single field name: no new queries needed */

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
index 030923f..a5795ee 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
@@ -20,7 +20,7 @@ namespace Lucene.Net.QueryParser.Surround.Query
      * limitations under the License.
      */
 
-    public abstract class RewriteQuery<SQ> : Search.Query
+    internal abstract class RewriteQuery<SQ> : Search.Query
     {
         protected readonly SQ srndQuery;
         protected readonly string fieldName;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
index 5e39e03..77877bb 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
@@ -80,12 +80,12 @@ namespace Lucene.Net.QueryParser.Surround.Query
             void VisitMatchingTerm(Term t);
         }
 
-        public string DistanceSubQueryNotAllowed()
+        public virtual string DistanceSubQueryNotAllowed()
         {
             return null;
         }
 
-        public void AddSpanQueries(SpanNearClauseFactory sncf)
+        public virtual void AddSpanQueries(SpanNearClauseFactory sncf)
         {
             VisitMatchingTerms(
                 sncf.IndexReader,

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
index 6502d6c..f2db8fb 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
@@ -24,7 +24,7 @@ namespace Lucene.Net.QueryParser.Surround.Query
 
     internal class SimpleTermRewriteQuery : RewriteQuery<SimpleTerm>
     {
-        public  SimpleTermRewriteQuery(
+        public SimpleTermRewriteQuery(
             SimpleTerm srndQuery,
             string fieldName,
             BasicQueryFactory qf)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
index 6cddb9c..1465462 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
@@ -76,7 +76,7 @@ namespace Lucene.Net.QueryParser.Surround.Query
             AddSpanQueryWeighted((SpanQuery)q, q.Boost);
         }
 
-        public SpanQuery MakeSpanClause()
+        public virtual SpanQuery MakeSpanClause()
         {
             List<SpanQuery> spanQueries = new List<SpanQuery>();
             foreach (var wsq in weightBySpanQuery)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
index 7a1a8b3..fa61091 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
@@ -22,7 +22,7 @@ namespace Lucene.Net.QueryParser.Surround.Query
      * limitations under the License.
      */
 
-    public static class SrndBooleanQuery
+    internal class SrndBooleanQuery
     {
         public static void AddQueriesToBoolean(
             BooleanQuery bq,

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
index 57b19cc..569db53 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
@@ -48,7 +48,6 @@ namespace Lucene.Net.QueryParser.Surround.Query
 
         public virtual string WeightOperator { get { return "^"; } }
 
-
         protected virtual void WeightToString(StringBuilder r)
         { 
             /* append the weight part of a query */

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/387d985b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index 369fe92..b5ce3a7 100644
--- a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -177,7 +177,7 @@ namespace Lucene.Net.QueryParser.Classic
                 : base(TEST_VERSION_CURRENT, "a", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
             {}
 
-            protected internal override Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
+            internal override Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
             {
                 if (fuzzySlop.image.EndsWith("\u20ac"))
                 {


[17/50] [abbrv] lucenenet git commit: Ported QueryParser.Surround namespace + tests.

Posted by sy...@apache.org.
Ported QueryParser.Surround namespace + tests.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/1e7576a6
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/1e7576a6
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/1e7576a6

Branch: refs/heads/master
Commit: 1e7576a6cc2ccf8277004519ee8971283f4c89dc
Parents: 6224f3e
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Tue Aug 2 14:48:31 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:39 2016 +0700

----------------------------------------------------------------------
 .../Lucene.Net.QueryParser.csproj               |  27 +
 .../Surround/Parser/CharStream.cs               | 134 +++
 .../Surround/Parser/FastCharStream.cs           | 158 ++++
 .../Surround/Parser/ParseException.cs           | 234 +++++
 .../Surround/Parser/QueryParser.cs              | 912 +++++++++++++++++++
 .../Surround/Parser/QueryParserConstants.cs     | 120 +++
 .../Surround/Parser/QueryParserTokenManager.cs  | 760 ++++++++++++++++
 Lucene.Net.QueryParser/Surround/Parser/Token.cs | 142 +++
 .../Surround/Parser/TokenMgrError.cs            | 170 ++++
 .../Surround/Query/AndQuery.cs                  |  39 +
 .../Surround/Query/BasicQueryFactory.cs         | 110 +++
 .../Surround/Query/ComposedQuery.cs             | 144 +++
 .../Surround/Query/DistanceQuery.cs             | 117 +++
 .../Surround/Query/DistanceRewriteQuery.cs      |  35 +
 .../Surround/Query/DistanceSubQuery.cs          |  36 +
 .../Surround/Query/FieldsQuery.cs               | 105 +++
 .../Surround/Query/NotQuery.cs                  |  48 +
 .../Surround/Query/OrQuery.cs                   |  71 ++
 .../Surround/Query/RewriteQuery.cs              |  85 ++
 .../Surround/Query/SimpleTerm.cs                | 118 +++
 .../Surround/Query/SimpleTermRewriteQuery.cs    |  64 ++
 .../Surround/Query/SpanNearClauseFactory.cs     |  93 ++
 .../Surround/Query/SrndBooleanQuery.cs          |  51 ++
 .../Surround/Query/SrndPrefixQuery.cs           | 108 +++
 .../Surround/Query/SrndQuery.cs                 | 149 +++
 .../Surround/Query/SrndTermQuery.cs             |  63 ++
 .../Surround/Query/SrndTruncQuery.cs            | 139 +++
 .../Surround/Query/TooManyBasicQueries.cs       |  30 +
 .../Lucene.Net.Tests.QueryParser.csproj         |   7 +
 .../Surround/Query/BooleanQueryTst.cs           | 142 +++
 .../Surround/Query/ExceptionQueryTst.cs         |  76 ++
 .../Surround/Query/SingleFieldTestDb.cs         |  55 ++
 .../Surround/Query/SrndQueryTest.cs             |  48 +
 .../Surround/Query/Test01Exceptions.cs          |  72 ++
 .../Surround/Query/Test02Boolean.cs             | 178 ++++
 .../Surround/Query/Test03Distance.cs            | 341 +++++++
 36 files changed, 5181 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
index 646e931..dc38a02 100644
--- a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
+++ b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
@@ -58,6 +58,33 @@
     <Compile Include="Flexible\Standard\CommonQueryParserConfiguration.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
     <Compile Include="Simple\SimpleQueryParser.cs" />
+    <Compile Include="Surround\Parser\CharStream.cs" />
+    <Compile Include="Surround\Parser\FastCharStream.cs" />
+    <Compile Include="Surround\Parser\ParseException.cs" />
+    <Compile Include="Surround\Parser\QueryParser.cs" />
+    <Compile Include="Surround\Parser\QueryParserConstants.cs" />
+    <Compile Include="Surround\Parser\QueryParserTokenManager.cs" />
+    <Compile Include="Surround\Parser\Token.cs" />
+    <Compile Include="Surround\Parser\TokenMgrError.cs" />
+    <Compile Include="Surround\Query\AndQuery.cs" />
+    <Compile Include="Surround\Query\BasicQueryFactory.cs" />
+    <Compile Include="Surround\Query\ComposedQuery.cs" />
+    <Compile Include="Surround\Query\DistanceQuery.cs" />
+    <Compile Include="Surround\Query\DistanceRewriteQuery.cs" />
+    <Compile Include="Surround\Query\DistanceSubQuery.cs" />
+    <Compile Include="Surround\Query\FieldsQuery.cs" />
+    <Compile Include="Surround\Query\NotQuery.cs" />
+    <Compile Include="Surround\Query\OrQuery.cs" />
+    <Compile Include="Surround\Query\RewriteQuery.cs" />
+    <Compile Include="Surround\Query\SimpleTerm.cs" />
+    <Compile Include="Surround\Query\SimpleTermRewriteQuery.cs" />
+    <Compile Include="Surround\Query\SpanNearClauseFactory.cs" />
+    <Compile Include="Surround\Query\SrndBooleanQuery.cs" />
+    <Compile Include="Surround\Query\SrndPrefixQuery.cs" />
+    <Compile Include="Surround\Query\SrndQuery.cs" />
+    <Compile Include="Surround\Query\SrndTermQuery.cs" />
+    <Compile Include="Surround\Query\SrndTruncQuery.cs" />
+    <Compile Include="Surround\Query\TooManyBasicQueries.cs" />
   </ItemGroup>
   <ItemGroup>
     <ProjectReference Include="..\src\Lucene.Net.Analysis.Common\Lucene.Net.Analysis.Common.csproj">

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs b/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
new file mode 100644
index 0000000..bfb2fc2
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/CharStream.cs
@@ -0,0 +1,134 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+
+    /// <summary> 
+    /// This interface describes a character stream that maintains line and
+    /// column number positions of the characters.  It also has the capability
+    /// to backup the stream to some extent.  An implementation of this
+    /// interface is used in the TokenManager implementation generated by
+    /// JavaCCParser.
+    /// 
+    /// All the methods except backup can be implemented in any fashion. backup
+    /// needs to be implemented correctly for the correct operation of the lexer.
+    /// Rest of the methods are all used to get information like line number,
+    /// column number and the String that constitutes a token and are not used
+    /// by the lexer. Hence their implementation won't affect the generated lexer's
+    /// operation.
+    /// </summary>
+    public interface ICharStream
+    {
+        /// <summary> 
+        /// Returns the next character from the selected input.  The method
+        /// of selecting the input is the responsibility of the class
+        /// implementing this interface.  Can throw any java.io.IOException.
+        /// </summary>
+        char ReadChar();
+
+        /// <summary>
+        /// Returns the column position of the character last read.
+        /// </summary>
+        /// <deprecated>
+        /// </deprecated>
+        /// <seealso cref="EndColumn">
+        /// </seealso>
+        [Obsolete]
+        int Column { get; }
+
+        /// <summary>
+        /// Returns the line number of the character last read.
+        /// </summary>
+        /// <deprecated>
+        /// </deprecated>
+        /// <seealso cref="EndLine">
+        /// </seealso>
+        [Obsolete]
+        int Line { get; }
+
+        /// <summary>
+        /// Returns the column number of the last character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int EndColumn { get; }
+
+        /// <summary> 
+        /// Returns the line number of the last character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int EndLine { get; }
+
+        /// <summary> 
+        /// Returns the column number of the first character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int BeginColumn { get; }
+
+        /// <summary> 
+        /// Returns the line number of the first character for current token (being
+        /// matched after the last call to BeginToken).
+        /// </summary>
+        int BeginLine { get; }
+
+        /// <summary> 
+        /// Backs up the input stream by amount steps. Lexer calls this method if it
+        /// had already read some characters, but could not use them to match a
+        /// (longer) token. So, they will be used again as the prefix of the next
+        /// token and it is the implementation's responsibility to do this right.
+        /// </summary>
+        void Backup(int amount);
+
+        /// <summary> 
+        /// Returns the next character that marks the beginning of the next token.
+        /// All characters must remain in the buffer between two successive calls
+        /// to this method to implement backup correctly.
+        /// </summary>
+        char BeginToken();
+
+        /// <summary> 
+        /// Returns a string made up of characters from the marked token beginning
+        /// to the current buffer position. Implementations have the choice of returning
+        /// anything that they want to. For example, for efficiency, one might decide
+        /// to just return null, which is a valid implementation.
+        /// </summary>
+        string Image { get; }
+
+        /// <summary> 
+        /// Returns an array of characters that make up the suffix of length 'len' for
+        /// the currently matched token. This is used to build up the matched string
+        /// for use in actions in the case of MORE. A simple and inefficient
+        /// implementation of this is as follows :
+        /// 
+        /// {
+        /// String t = GetImage();
+        /// return t.substring(t.length() - len, t.length()).toCharArray();
+        /// }
+        /// </summary>
+        char[] GetSuffix(int len);
+
+        /// <summary> 
+        /// The lexer calls this function to indicate that it is done with the stream
+        /// and hence implementations can free any resources held by this class.
+        /// Again, the body of this function can be just empty and it will not
+        /// affect the lexer's operation.
+        /// </summary>
+        void Done();
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs b/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
new file mode 100644
index 0000000..b33bd83
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/FastCharStream.cs
@@ -0,0 +1,158 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// An efficient implementation of JavaCC's CharStream interface.  <p/>Note that
+    /// this does not do line-number counting, but instead keeps track of the
+    /// character position of the token in the input, as required by Lucene's <see cref="Lucene.Net.Analysis.Token" />
+    /// API.
+    /// </summary>
+    public sealed class FastCharStream : ICharStream
+    {
+        internal char[] buffer = null;
+
+        internal int bufferLength = 0; // end of valid chars
+        internal int bufferPosition = 0; // next char to read
+
+        internal int tokenStart = 0; // offset in buffer
+        internal int bufferStart = 0; // position in file of buffer
+
+        internal System.IO.TextReader input; // source of chars
+
+        /// <summary>
+        /// Constructs from a Reader. 
+        /// </summary>
+        public FastCharStream(System.IO.TextReader r)
+        {
+            input = r;
+        }
+
+        public char ReadChar()
+        {
+            if (bufferPosition >= bufferLength)
+                Refill();
+            return buffer[bufferPosition++];
+        }
+
+        private void Refill()
+        {
+            int newPosition = bufferLength - tokenStart;
+
+            if (tokenStart == 0)
+            {
+                // token won't fit in buffer
+                if (buffer == null)
+                {
+                    // first time: alloc buffer
+                    buffer = new char[2048];
+                }
+                else if (bufferLength == buffer.Length)
+                {
+                    // grow buffer
+                    char[] newBuffer = new char[buffer.Length * 2];
+                    Array.Copy(buffer, 0, newBuffer, 0, bufferLength);
+                    buffer = newBuffer;
+                }
+            }
+            else
+            {
+                // shift token to front
+                Array.Copy(buffer, tokenStart, buffer, 0, newPosition);
+            }
+
+            bufferLength = newPosition; // update state
+            bufferPosition = newPosition;
+            bufferStart += tokenStart;
+            tokenStart = 0;
+
+            int charsRead = input.Read(buffer, newPosition, buffer.Length - newPosition);
+            if (charsRead <= 0)
+                throw new System.IO.IOException("read past eof");
+            else
+                bufferLength += charsRead;
+        }
+
+        public char BeginToken()
+        {
+            tokenStart = bufferPosition;
+            return ReadChar();
+        }
+
+        public void Backup(int amount)
+        {
+            bufferPosition -= amount;
+        }
+
+        public string Image
+        {
+            get { return new System.String(buffer, tokenStart, bufferPosition - tokenStart); }
+        }
+
+        public char[] GetSuffix(int len)
+        {
+            char[] value_Renamed = new char[len];
+            Array.Copy(buffer, bufferPosition - len, value_Renamed, 0, len);
+            return value_Renamed;
+        }
+
+        public void Done()
+        {
+            try
+            {
+                input.Close();
+            }
+            catch (System.IO.IOException e)
+            {
+                System.Console.Error.WriteLine("Caught: " + e + "; ignoring.");
+            }
+        }
+
+        public int Column
+        {
+            get { return bufferStart + bufferPosition; }
+        }
+
+        public int Line
+        {
+            get { return 1; }
+        }
+
+        public int EndColumn
+        {
+            get { return bufferStart + bufferPosition; }
+        }
+
+        public int EndLine
+        {
+            get { return 1; }
+        }
+
+        public int BeginColumn
+        {
+            get { return bufferStart + tokenStart; }
+        }
+
+        public int BeginLine
+        {
+            get { return 1; }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs b/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
new file mode 100644
index 0000000..1716658
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
@@ -0,0 +1,234 @@
+\ufeffusing System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary> 
+    /// This exception is thrown when parse errors are encountered.
+    /// You can explicitly create objects of this exception type by
+    /// calling the method GenerateParseException in the generated
+    /// parser.
+    /// 
+    /// You can modify this class to customize your error reporting
+    /// mechanisms so long as you retain the public fields.
+    /// </summary>
+    [Serializable]
+    public class ParseException : Exception
+    {
+        /// <summary>
+        /// This constructor is used by the method "GenerateParseException"
+        /// in the generated parser.  Calling this constructor generates
+        /// a new object of this type with the fields "currentToken",
+        /// "expectedTokenSequences", and "tokenImage" set.
+        /// </summary>
+        /// <param name="currentTokenVal"></param>
+        /// <param name="expectedTokenSequencesVal"></param>
+        /// <param name="tokenImageVal"></param>
+        public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        string[] tokenImageVal)
+            : base(Initialize(currentTokenVal, expectedTokenSequencesVal, tokenImageVal))
+        {
+            currentToken = currentTokenVal;
+            expectedTokenSequences = expectedTokenSequencesVal;
+            tokenImage = tokenImageVal;
+        }
+
+        /**
+         * The following constructors are for use by you for whatever
+         * purpose you can think of.  Constructing the exception in this
+         * manner makes the exception behave in the normal way - i.e., as
+         * documented in the class "Throwable".  The fields "errorToken",
+         * "expectedTokenSequences", and "tokenImage" do not contain
+         * relevant information.  The JavaCC generated code does not use
+         * these constructors.
+         */
+
+        public ParseException()
+        { }
+
+        public ParseException(string message)
+            : base(message)
+        { }
+
+        public ParseException(string message, Exception innerException)
+            : base(message, innerException)
+        { }
+
+
+        /// <summary> 
+        /// This is the last token that has been consumed successfully.  If
+        /// this object has been created due to a parse error, the token
+        /// following this token will (therefore) be the first error token.
+        /// </summary>
+        public Token currentToken;
+
+        /// <summary> 
+        /// Each entry in this array is an array of integers.  Each array
+        /// of integers represents a sequence of tokens (by their ordinal
+        /// values) that is expected at this point of the parse.
+        /// </summary>
+        public int[][] expectedTokenSequences;
+
+        /// <summary> 
+        /// This is a reference to the "tokenImage" array of the generated
+        /// parser within which the parse error occurred.  This array is
+        /// defined in the generated ...Constants interface.
+        /// </summary>
+        public string[] tokenImage;
+
+
+        /// <summary>
+        /// It uses "currentToken" and "expectedTokenSequences" to generate a parse
+        /// error message and returns it.  If this object has been created
+        /// due to a parse error, and you do not catch it (it gets thrown
+        /// from the parser) the correct error message
+        /// gets displayed.
+        /// </summary>
+        /// <param name="currentToken"></param>
+        /// <param name="expectedTokenSequences"></param>
+        /// <param name="tokenImage"></param>
+        /// <returns></returns>
+        private static string Initialize(Token currentToken,
+            int[][] expectedTokenSequences,
+            string[] tokenImage)
+        {
+
+            StringBuilder expected = new StringBuilder();
+            int maxSize = 0;
+            for (int i = 0; i < expectedTokenSequences.Length; i++)
+            {
+                if (maxSize < expectedTokenSequences[i].Length)
+                {
+                    maxSize = expectedTokenSequences[i].Length;
+                }
+                for (int j = 0; j < expectedTokenSequences[i].Length; j++)
+                {
+                    expected.Append(tokenImage[expectedTokenSequences[i][j]]).Append(' ');
+                }
+                if (expectedTokenSequences[i][expectedTokenSequences[i].Length - 1] != 0)
+                {
+                    expected.Append("...");
+                }
+                expected.Append(eol).Append("    ");
+            }
+            string retval = "Encountered \"";
+            Token tok = currentToken.next;
+            for (int i = 0; i < maxSize; i++)
+            {
+                if (i != 0)
+                    retval += " ";
+                if (tok.kind == 0)
+                {
+                    retval += tokenImage[0];
+                    break;
+                }
+                retval += (" " + tokenImage[tok.kind]);
+                retval += " \"";
+                retval += Add_escapes(tok.image);
+                retval += " \"";
+                tok = tok.next;
+            }
+            retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
+            retval += ("." + eol);
+            if (expectedTokenSequences.Length == 1)
+            {
+                retval += ("Was expecting:" + eol + "    ");
+            }
+            else
+            {
+                retval += ("Was expecting one of:" + eol + "    ");
+            }
+            retval += expected.ToString();
+            return retval;
+        }
+
+        /// <summary> 
+        /// The end of line string for this machine.
+        /// </summary>
+        protected static string eol = Environment.NewLine;
+
+        /// <summary> 
+        /// Used to convert raw characters to their escaped version
+        /// when these raw version cannot be used as part of an ASCII
+        /// string literal.
+        /// </summary>
+        internal static string Add_escapes(string str)
+        {
+            StringBuilder retval = new StringBuilder();
+            char ch;
+            for (int i = 0; i < str.Length; i++)
+            {
+                switch (str[i])
+                {
+
+                    case (char)(0):
+                        continue;
+
+                    case '\b':
+                        retval.Append("\\b");
+                        continue;
+
+                    case '\t':
+                        retval.Append("\\t");
+                        continue;
+
+                    case '\n':
+                        retval.Append("\\n");
+                        continue;
+
+                    case '\f':
+                        retval.Append("\\f");
+                        continue;
+
+                    case '\r':
+                        retval.Append("\\r");
+                        continue;
+
+                    case '\"':
+                        retval.Append("\\\"");
+                        continue;
+
+                    case '\'':
+                        retval.Append("\\\'");
+                        continue;
+
+                    case '\\':
+                        retval.Append("\\\\");
+                        continue;
+
+                    default:
+                        if ((ch = str[i]) < 0x20 || ch > 0x7e)
+                        {
+                            System.String s = "0000" + System.Convert.ToString(ch, 16);
+                            retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
+                        }
+                        else
+                        {
+                            retval.Append(ch);
+                        }
+                        continue;
+
+                }
+            }
+            return retval.ToString();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
new file mode 100644
index 0000000..49ef7d4
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -0,0 +1,912 @@
using Lucene.Net.QueryParser.Surround.Query;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// This class is generated by JavaCC.  The only method that clients should need
+    /// to call is {@link #parse parse()}.
+    ///
+
+    /// <p>This parser generates queries that make use of position information
+    ///   (Span queries). It provides positional operators (<code>w</code> and
+    ///   <code>n</code>) that accept a numeric distance, as well as boolean
+    ///   operators (<code>and</code>, <code>or</code>, and <code>not</code>,
+    ///   wildcards (<code>///</code> and <code>?</code>), quoting (with
+    ///   <code>"</code>), and boosting (via <code>^</code>).</p>
+
+    ///  <p>The operators (W, N, AND, OR, NOT) can be expressed lower-cased or
+    ///   upper-cased, and the non-unary operators (everything but NOT) support
+    ///   both infix <code>(a AND b AND c)</code> and prefix <code>AND(a, b,
+    ///   c)</code> notation. </p>
+
+    ///  <p>The W and N operators express a positional relationship among their
+    ///  operands.  N is ordered, and W is unordered.  The distance is 1 by
+    ///  default, meaning the operands are adjacent, or may be provided as a
+    ///  prefix from 2-99.  So, for example, 3W(a, b) means that terms a and b
+    ///  must appear within three positions of each other, or in other words, up
+    ///  to two terms may appear between a and b.  </p>
+    /// </summary>
+    public class QueryParser
+    {
        // Minimum number of real (non-wildcard) characters required before a
        // prefix or truncated query is accepted; guards against overly broad
        // queries such as "a*".
        internal readonly int minimumPrefixLength = 3;
        internal readonly int minimumCharsInTrunc = 3;
        // Error message prefixes used when rejecting a term or boost.
        internal readonly string truncationErrorMessage = "Too unrestrictive truncation: ";
        internal readonly string boostErrorMessage = "Cannot handle boost value: ";

        /* CHECKME: These should be the same as for the tokenizer. How? */
        internal readonly char truncator = '*';
        internal readonly char anyChar = '?';
        internal readonly char quote = '"';
        internal readonly char fieldOperator = ':';
        internal readonly char comma = ','; /* prefix list separator */
        internal readonly char carat = '^'; /* weight operator */
+
+        public static SrndQuery Parse(string query)
+        {
+            QueryParser parser = new QueryParser();
+            return parser.Parse2(query);
+        }
+
        /// <summary>
        /// Creates a parser over an empty input stream; <see cref="Parse2"/>
        /// re-initializes it with the real query text before parsing.
        /// </summary>
        public QueryParser()
            : this(new FastCharStream(new StringReader("")))
        {
        }
+
+        public virtual SrndQuery Parse2(string query)
+        {
+            ReInit(new FastCharStream(new StringReader(query)));
+            try
+            {
+                return TopSrndQuery();
+            }
+            catch (TokenMgrError tme)
+            {
+                throw new ParseException(tme.Message);
+            }
+        }
+
        /// <summary>
        /// Wraps <paramref name="q"/> so that it is evaluated against the
        /// given field names (the "field:" prefixes collected by <see cref="OptionalFields"/>).
        /// </summary>
        protected virtual SrndQuery GetFieldsQuery(
            SrndQuery q, IEnumerable<string> fieldNames)
        {
            /* FIXME: check acceptable subquery: at least one subquery should not be
             * a fields query.
             */
            return new FieldsQuery(q, fieldNames, fieldOperator);
        }
+
        /// <summary>Factory for an OR query; <paramref name="orToken"/> supplies the operator image used for display.</summary>
        protected virtual SrndQuery GetOrQuery(IEnumerable<SrndQuery> queries, bool infix, Token orToken)
        {
            return new OrQuery(queries, infix, orToken.image);
        }
+
        /// <summary>Factory for an AND query; <paramref name="andToken"/> supplies the operator image used for display.</summary>
        protected virtual SrndQuery GetAndQuery(IEnumerable<SrndQuery> queries, bool infix, Token andToken)
        {
            return new AndQuery(queries, infix, andToken.image);
        }
+
        /// <summary>Factory for a NOT query; <paramref name="notToken"/> supplies the operator image used for display.</summary>
        protected virtual SrndQuery GetNotQuery(IEnumerable<SrndQuery> queries, Token notToken)
        {
            return new NotQuery(queries, notToken.image);
        }
+
+        protected static int GetOpDistance(string distanceOp)
+        {
+            /* W, 2W, 3W etc -> 1, 2 3, etc. Same for N, 2N ... */
+            return distanceOp.Length == 1
+              ? 1
+              : int.Parse(distanceOp.Substring(0, distanceOp.Length - 1));
+        }
+
        /// <summary>
        /// Validates the subqueries of a distance query; throws a
        /// <see cref="ParseException"/> naming the offending operator when the
        /// query reports a disallowed subquery.
        /// </summary>
        protected static void CheckDistanceSubQueries(DistanceQuery distq, string opName)
        {
            string m = distq.DistanceSubQueryNotAllowed();
            if (m != null)
            {
                throw new ParseException("Operator " + opName + ": " + m);
            }
        }
+
+        protected virtual SrndQuery GetDistanceQuery(
+            IEnumerable<SrndQuery> queries,
+            bool infix,
+            Token dToken,
+            bool ordered)
+        {
+            DistanceQuery dq = new DistanceQuery(queries,
+                                                infix,
+                                                GetOpDistance(dToken.image),
+                                                dToken.image,
+                                                ordered);
+            CheckDistanceSubQueries(dq, dToken.image);
+            return dq;
+        }
+
+        protected virtual SrndQuery GetTermQuery(
+              String term, bool quoted)
+        {
+            return new SrndTermQuery(term, quoted);
+        }
+
+        protected virtual bool AllowedSuffix(String suffixed)
+        {
+            return (suffixed.Length - 1) >= minimumPrefixLength;
+        }
+
+        protected virtual SrndQuery GetPrefixQuery(
+            string prefix, bool quoted)
+        {
+            return new SrndPrefixQuery(prefix, quoted, truncator);
+        }
+
+        protected virtual bool AllowedTruncation(string truncated)
+        {
+            /* At least 3 normal characters needed. */
+            int nrNormalChars = 0;
+            for (int i = 0; i < truncated.Length; i++)
+            {
+                char c = truncated[i];
+                if ((c != truncator) && (c != anyChar))
+                {
+                    nrNormalChars++;
+                }
+            }
+            return nrNormalChars >= minimumCharsInTrunc;
+        }
+
+        protected virtual SrndQuery GetTruncQuery(string truncated)
+        {
+            return new SrndTruncQuery(truncated, truncator, anyChar);
+        }
+
        /// <summary>
        /// Grammar entry point: TopSrndQuery ::= FieldsQuery &lt;EOF&gt;.
        /// NOTE: generated by JavaCC; the <c>{ if (true) return ...; }</c> and
        /// trailing throw are generator boilerplate and must be kept as-is.
        /// </summary>
        public SrndQuery TopSrndQuery()
        {
            SrndQuery q;
            q = FieldsQuery();
            Jj_consume_token(0);
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// FieldsQuery ::= OptionalFields OrQuery.  When no field prefixes are
        /// present (<c>fieldNames == null</c>) the inner query is returned
        /// unwrapped.  (JavaCC-generated; boilerplate shape preserved.)
        /// </summary>
        public SrndQuery FieldsQuery()
        {
            SrndQuery q;
            IEnumerable<string> fieldNames;
            fieldNames = OptionalFields();
            q = OrQuery();
            { if (true) return (fieldNames == null) ? q : GetFieldsQuery(q, fieldNames); }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// OptionalFields ::= (TERM COLON)*.  Collects leading "field:"
        /// prefixes, using a two-token lookahead (<see cref="Jj_2_1"/>) to
        /// distinguish a field prefix from a plain term.
        /// Returns <c>null</c> (not an empty list) when no prefixes are
        /// present — <see cref="FieldsQuery"/> relies on that distinction.
        /// (JavaCC-generated; goto/label shape preserved.)
        /// </summary>
        public IEnumerable<string> OptionalFields()
        {
            Token fieldName;
            IList<string> fieldNames = null;

            while (true)
            {
                if (Jj_2_1(2))
                {
                    ;
                }
                else
                {
                    goto label_1;
                }
                // to the colon
                fieldName = Jj_consume_token(RegexpToken.TERM);
                Jj_consume_token(RegexpToken.COLON);
                if (fieldNames == null)
                {
                    fieldNames = new List<string>();
                }
                fieldNames.Add(fieldName.image);
            }
        label_1:
            { if (true) return fieldNames; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// OrQuery ::= AndQuery (OR AndQuery)*.  With two or more operands the
        /// operand list is accumulated and only the LAST OR token's image is
        /// kept for the resulting query.  (JavaCC-generated; shape preserved.)
        /// </summary>
        public SrndQuery OrQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = AndQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OR:
                        ;
                        break;
                    default:
                        jj_la1[0] = jj_gen;
                        goto label_2;
                }
                oprt = Jj_consume_token(RegexpToken.OR);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = AndQuery();
                queries.Add(q);
            }
        label_2:
            { if (true) return (queries == null) ? q : GetOrQuery(queries, true /* infix */, oprt); }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// AndQuery ::= NotQuery (AND NotQuery)*.  Mirrors <see cref="OrQuery"/>:
        /// operands accumulate and only the last AND token's image is kept.
        /// (JavaCC-generated; shape preserved.)
        /// </summary>
        public SrndQuery AndQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = NotQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.AND:
                        ;
                        break;
                    default:
                        jj_la1[1] = jj_gen;
                        goto label_3;
                }
                oprt = Jj_consume_token(RegexpToken.AND);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = NotQuery();
                queries.Add(q);
            }
        label_3:
            { if (true) return (queries == null) ? q : GetAndQuery(queries, true /* infix */, oprt); }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// NotQuery ::= NQuery (NOT NQuery)*.  Same accumulation pattern as
        /// <see cref="OrQuery"/>/<see cref="AndQuery"/>.
        /// (JavaCC-generated; shape preserved.)
        /// </summary>
        public SrndQuery NotQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries = null;
            Token oprt = null;
            q = NQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.NOT:
                        ;
                        break;
                    default:
                        jj_la1[2] = jj_gen;
                        goto label_4;
                }
                oprt = Jj_consume_token(RegexpToken.NOT);
                /* keep only last used operator */
                if (queries == null)
                {
                    queries = new List<SrndQuery>();
                    queries.Add(q);
                }
                q = NQuery();
                queries.Add(q);
            }
        label_4:
            { if (true) return (queries == null) ? q : GetNotQuery(queries, oprt); }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// NQuery ::= WQuery (N WQuery)*.  Each N operator folds the previous
        /// result and the next operand into an unordered distance query, making
        /// the operator left-associative.  (JavaCC-generated; shape preserved.)
        /// </summary>
        public SrndQuery NQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries;
            Token dt;
            q = WQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.N:
                        ;
                        break;
                    default:
                        jj_la1[3] = jj_gen;
                        goto label_5;
                }
                dt = Jj_consume_token(RegexpToken.N);
                queries = new List<SrndQuery>();
                queries.Add(q); /* left associative */

                q = WQuery();
                queries.Add(q);
                q = GetDistanceQuery(queries, true /* infix */, dt, false /* not ordered */);
            }
        label_5:
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// WQuery ::= PrimaryQuery (W PrimaryQuery)*.  Like <see cref="NQuery"/>
        /// but builds ORDERED distance queries.  (JavaCC-generated; shape preserved.)
        /// </summary>
        public SrndQuery WQuery()
        {
            SrndQuery q;
            IList<SrndQuery> queries;
            Token wt;
            q = PrimaryQuery();

            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.W:
                        ;
                        break;
                    default:
                        jj_la1[4] = jj_gen;
                        goto label_6;
                }
                wt = Jj_consume_token(RegexpToken.W);
                queries = new List<SrndQuery>();
                queries.Add(q); /* left associative */

                q = PrimaryQuery();
                queries.Add(q);
                q = GetDistanceQuery(queries, true /* infix */, wt, true /* ordered */);
            }
        label_6:
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// PrimaryQuery ::= ( "(" FieldsQuery ")" | PrefixOperatorQuery | SimpleTerm )
        /// OptionalWeights — a bracketed query, a prefix-notation operator
        /// query, or a plain term, each followed by optional ^boosts.
        /// (JavaCC-generated; shape preserved.)
        /// </summary>
        public SrndQuery PrimaryQuery()
        {
            /* bracketed weighted query or weighted term */
            SrndQuery q;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.LPAREN:
                    Jj_consume_token(RegexpToken.LPAREN);
                    q = FieldsQuery();
                    Jj_consume_token(RegexpToken.RPAREN);
                    break;
                case RegexpToken.OR:
                case RegexpToken.AND:
                case RegexpToken.W:
                case RegexpToken.N:
                    q = PrefixOperatorQuery();
                    break;
                case RegexpToken.TRUNCQUOTED:
                case RegexpToken.QUOTED:
                case RegexpToken.SUFFIXTERM:
                case RegexpToken.TRUNCTERM:
                case RegexpToken.TERM:
                    q = SimpleTerm();
                    break;
                default:
                    jj_la1[5] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            OptionalWeights(q);
            { if (true) return q; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Prefix (function-style) operator notation: OR(a, b), AND(a, b),
        /// nW(a, b), nN(a, b).  The operator token is consumed first, then the
        /// parenthesized operand list.  (JavaCC-generated; shape preserved.)
        /// </summary>
        public SrndQuery PrefixOperatorQuery()
        {
            Token oprt;
            IEnumerable<SrndQuery> queries;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.OR:
                    oprt = Jj_consume_token(RegexpToken.OR);
                    /* prefix OR */
                    queries = FieldsQueryList();
                    { if (true) return GetOrQuery(queries, false /* not infix */, oprt); }
                    break;
                case RegexpToken.AND:
                    oprt = Jj_consume_token(RegexpToken.AND);
                    /* prefix AND */
                    queries = FieldsQueryList();
                    { if (true) return GetAndQuery(queries, false /* not infix */, oprt); }
                    break;
                case RegexpToken.N:
                    oprt = Jj_consume_token(RegexpToken.N);
                    /* prefix N */
                    queries = FieldsQueryList();
                    { if (true) return GetDistanceQuery(queries, false /* not infix */, oprt, false /* not ordered */); }
                    break;
                case RegexpToken.W:
                    oprt = Jj_consume_token(RegexpToken.W);
                    /* prefix W */
                    queries = FieldsQueryList();
                    { if (true) return GetDistanceQuery(queries, false  /* not infix */, oprt, true /* ordered */); }
                    break;
                default:
                    jj_la1[6] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Operand list for prefix-notation operators:
        /// "(" FieldsQuery (COMMA FieldsQuery)+ ")".  Note the first loop
        /// iteration consumes a COMMA before checking, so at least TWO
        /// operands are required.  (JavaCC-generated; shape preserved.)
        /// </summary>
        public IEnumerable<SrndQuery> FieldsQueryList()
        {
            SrndQuery q;
            IList<SrndQuery> queries = new List<SrndQuery>();
            Jj_consume_token(RegexpToken.LPAREN);
            q = FieldsQuery();
            queries.Add(q);

            while (true)
            {
                Jj_consume_token(RegexpToken.COMMA);
                q = FieldsQuery();
                queries.Add(q);
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.COMMA:
                        ;
                        break;
                    default:
                        jj_la1[7] = jj_gen;
                        goto label_7;
                }
            }
        label_7:
            Jj_consume_token(RegexpToken.RPAREN);
            { if (true) return queries; }
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// A single term in one of five lexical forms: plain TERM, QUOTED
        /// (quotes stripped), SUFFIXTERM (trailing truncator stripped →
        /// prefix query), TRUNCTERM (wildcards kept → truncation query), or
        /// TRUNCQUOTED (quotes and trailing truncator stripped → quoted
        /// prefix query).  Each truncated form is validated against the
        /// minimum-length rules first.  (JavaCC-generated; shape preserved.)
        /// </summary>
        public SrndQuery SimpleTerm()
        {
            Token term;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.TERM:
                    term = Jj_consume_token(RegexpToken.TERM);
                    { if (true) return GetTermQuery(term.image, false /* not quoted */); }
                    break;
                case RegexpToken.QUOTED:
                    term = Jj_consume_token(RegexpToken.QUOTED);
                    // Strip the surrounding quote characters.
                    // TODO: Substring fix
                    { if (true) return GetTermQuery(term.image.Substring(1, (term.image.Length - 1) - 1), true /* quoted */); }
                    break;
                case RegexpToken.SUFFIXTERM:
                    term = Jj_consume_token(RegexpToken.SUFFIXTERM);
                    /* ending in * */
                    if (!AllowedSuffix(term.image))
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    // Strip the trailing truncator.
                    // TODO: Substring fix
                    { if (true) return GetPrefixQuery(term.image.Substring(0, term.image.Length - 1), false /* not quoted */); }
                    break;
                case RegexpToken.TRUNCTERM:
                    term = Jj_consume_token(RegexpToken.TRUNCTERM);
                    /* with at least one * or ? */
                    if (!AllowedTruncation(term.image))
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    { if (true) return GetTruncQuery(term.image); }
                    break;
                case RegexpToken.TRUNCQUOTED:
                    term = Jj_consume_token(RegexpToken.TRUNCQUOTED);
                    /* eg. "9b-b,m"* */
                    if ((term.image.Length - 3) < minimumPrefixLength)
                    {
                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                    }
                    // Strip quotes and trailing truncator.
                    // TODO: Substring fix
                    { if (true) return GetPrefixQuery(term.image.Substring(1, (term.image.Length - 2) - 1), true /* quoted */); }
                    break;
                default:
                    jj_la1[8] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            throw new Exception("Missing return statement in function");
        }
+
+        public void OptionalWeights(SrndQuery q)
+        {
+            Token weight = null;
+        
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.CARAT:
+                        ;
+                        break;
+                    default:
+                        jj_la1[9] = jj_gen;
+                        goto label_8;
+                }
+                Jj_consume_token(RegexpToken.CARAT);
+                weight = Jj_consume_token(RegexpToken.NUMBER);
+                float f;
+                try
+                {
+                    // TODO: Test parsing float in various cultures (.NET)
+                    f = float.Parse(weight.image);
+                }
+                catch (Exception floatExc)
+                {
+                    { if (true) throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")"); }
+                }
+                if (f <= 0.0)
+                {
+                    { if (true) throw new ParseException(boostErrorMessage + weight.image); }
+                }
+                q.Weight = (f * q.Weight); /* left associative, fwiw */
+            }
+        label_8: ;
+        }
+
        /// <summary>
        /// Generated lookahead helper: speculatively checks (without
        /// consuming) whether the next tokens match choice 1 (TERM COLON),
        /// with a lookahead limit of <paramref name="xla"/> tokens.
        /// </summary>
        private bool Jj_2_1(int xla)
        {
            jj_la = xla; jj_lastpos = jj_scanpos = token;
            try { return !Jj_3_1(); }
            catch (LookaheadSuccess) { return true; }
            finally { Jj_save(0, xla); }
        }
+
        /// <summary>
        /// Generated scan routine for lookahead choice 1: TERM followed by
        /// COLON.  Returns true on scan FAILURE (JavaCC convention).
        /// </summary>
        private bool Jj_3_1()
        {
            if (Jj_scan_token(RegexpToken.TERM)) return true;
            if (Jj_scan_token(RegexpToken.COLON)) return true;
            return false;
        }
+
        /** Generated Token Manager. */
        public QueryParserTokenManager token_source;
        /** Current token. */
        public Token token;
        /** Next token. */
        public Token jj_nt;
        // Cached kind of the next token; -1 means "not yet fetched".
        private int jj_ntk;
        // Scan positions used by the generated lookahead (Jj_2_x/Jj_scan_token).
        private Token jj_scanpos, jj_lastpos;
        // Remaining lookahead token budget.
        private int jj_la;
        // Generation counter used for error-recovery bookkeeping.
        private int jj_gen;
        // Per-choice-point bookkeeping, one slot per switch in the parser (0..9).
        private readonly int[] jj_la1 = new int[10];
        private static int[] jj_la1_0;
        static QueryParser()
        {
            Jj_la1_init_0();
        }

        // Generated table: one bit mask per choice point; presumably each mask
        // encodes the token kinds accepted there — generated by JavaCC, do not
        // edit by hand.
        private static void Jj_la1_init_0()
        {
            jj_la1_0 = new int[] { 0x100, 0x200, 0x400, 0x1000, 0x800, 0x7c3b00, 0x1b00, 0x8000, 0x7c0000, 0x20000, };
        }
        private readonly JJCalls[] jj_2_rtns = new JJCalls[1];
        private bool jj_rescan = false;
        private int jj_gc = 0;
+
+        /// <summary>
+        /// Constructor with user supplied CharStream.
+        /// </summary>
+        /// <param name="stream">Character stream to tokenize.</param>
+        public QueryParser(ICharStream stream)
+        {
+            token_source = new QueryParserTokenManager(stream);
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            // -1 marks every choice point as "not yet visited" for error reporting.
+            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>
+        /// Reinitialise: resets all parser state so this instance can be
+        /// reused to parse a new character stream.
+        /// </summary>
+        /// <param name="stream">New character stream to tokenize.</param>
+        public virtual void ReInit(ICharStream stream)
+        {
+            token_source.ReInit(stream);
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            // -1 marks every choice point as "not yet visited" for error reporting.
+            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>
+        /// Constructor with generated Token Manager.
+        /// </summary>
+        /// <param name="tm">Token manager supplying the token stream.</param>
+        public QueryParser(QueryParserTokenManager tm)
+        {
+            token_source = tm;
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            // -1 marks every choice point as "not yet visited" for error reporting.
+            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>
+        /// Reinitialise: resets all parser state so this instance can be
+        /// reused with a new token manager.
+        /// </summary>
+        /// <param name="tm">New token manager supplying the token stream.</param>
+        public virtual void ReInit(QueryParserTokenManager tm)
+        {
+            token_source = tm;
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            // -1 marks every choice point as "not yet visited" for error reporting.
+            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        // Consumes the next token and returns it when it is of the expected
+        // kind. On a mismatch, restores the previous position, records the
+        // failed kind (jj_kind) for error reporting, and throws a ParseException.
+        private Token Jj_consume_token(int kind)
+        {
+            Token oldToken;
+            if ((oldToken = token).next != null) token = token.next;
+            else token = token.next = token_source.GetNextToken();
+            jj_ntk = -1;
+            if (token.kind == kind)
+            {
+                jj_gen++;
+                // Every 100 consumed tokens, sweep the memoized lookahead
+                // results and drop entries from generations that can no
+                // longer be reused (so their token chains can be collected).
+                if (++jj_gc > 100)
+                {
+                    jj_gc = 0;
+                    for (int i = 0; i < jj_2_rtns.Length; i++)
+                    {
+                        JJCalls c = jj_2_rtns[i];
+                        while (c != null)
+                        {
+                            if (c.gen < jj_gen) c.first = null;
+                            c = c.next;
+                        }
+                    }
+                }
+                return token;
+            }
+            token = oldToken;
+            jj_kind = kind;
+            throw GenerateParseException();
+        }
+
+        // Control-flow exception used to abort a lookahead scan as soon as
+        // success is certain (thrown by Jj_scan_token, caught by Jj_2_1 and
+        // Jj_rescan_token). A single shared instance (jj_ls) is thrown to
+        // avoid allocating on this hot path.
+        private sealed class LookaheadSuccess : Exception { }
+        private readonly LookaheadSuccess jj_ls = new LookaheadSuccess();
+
+        // Advances the lookahead scan position by one token and checks its
+        // kind. Returns true when the scan fails (kind mismatch); throws the
+        // shared jj_ls instance when the lookahead budget is exhausted and
+        // every token matched (definite success).
+        private bool Jj_scan_token(int kind)
+        {
+            if (jj_scanpos == jj_lastpos)
+            {
+                // At the frontier: fetch (or buffer) the next token.
+                jj_la--;
+                if (jj_scanpos.next == null)
+                {
+                    jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
+                }
+                else
+                {
+                    jj_lastpos = jj_scanpos = jj_scanpos.next;
+                }
+            }
+            else
+            {
+                jj_scanpos = jj_scanpos.next;
+            }
+            if (jj_rescan)
+            {
+                // Error-reporting rescan: record this token kind together with
+                // its distance from the current token (see Jj_add_error_token).
+                int i = 0; Token tok = token;
+                while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
+                if (tok != null) Jj_add_error_token(kind, i);
+            }
+            if (jj_scanpos.kind != kind) return true;
+            if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
+            return false;
+        }
+
+        /// <summary>
+        /// Consumes and returns the next Token, fetching it from the token
+        /// manager when it has not been buffered yet.
+        /// </summary>
+        /// <returns>The token that is now the current token.</returns>
+        public Token GetNextToken()
+        {
+            if (token.next != null) token = token.next;
+            else token = token.next = token_source.GetNextToken();
+            jj_ntk = -1;
+            jj_gen++;
+            return token;
+        }
+
+        /// <summary>
+        /// Get the specific Token: the token <paramref name="index"/> positions
+        /// ahead of the current one (0 = current), buffering tokens from the
+        /// token manager as needed. Does not advance the parser.
+        /// </summary>
+        /// <param name="index">How many tokens ahead to look.</param>
+        /// <returns>The requested token.</returns>
+        public Token GetToken(int index)
+        {
+            Token t = token;
+            for (int i = 0; i < index; i++)
+            {
+                if (t.next != null) t = t.next;
+                else t = t.next = token_source.GetNextToken();
+            }
+            return t;
+        }
+
+        // Returns (and caches in jj_ntk) the kind of the next token, fetching
+        // it from the token manager when it has not been buffered yet.
+        private int Jj_ntk()
+        {
+            if ((jj_nt = token.next) == null)
+                return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
+            else
+                return (jj_ntk = jj_nt.kind);
+        }
+
+        private IList<int[]> jj_expentries = new List<int[]>();  // expected-token sequences gathered for ParseException
+        private int[] jj_expentry;                               // the sequence currently being built
+        private int jj_kind = -1;                                // kind of the token that failed to match, or -1
+        private int[] jj_lasttokens = new int[100];              // token kinds recorded during an error rescan
+        private int jj_endpos;                                   // number of valid entries in jj_lasttokens
+
+        // Records a token kind observed at position pos during an
+        // error-reporting rescan, accumulating candidate expected-token
+        // sequences in jj_expentries (consumed by GenerateParseException).
+        // NOTE: the Java original uses labeled continue/break on the entries
+        // loop ("continue/break jj_entries_loop"); C# has no labeled loops, so
+        // gotos reproduce that control flow. The previous plain 'continue'
+        // only affected the inner for loop (a no-op at its end), so sequences
+        // that did NOT match an existing entry were still added and the
+        // foreach always stopped after the first length match.
+        private void Jj_add_error_token(int kind, int pos)
+        {
+            if (pos >= 100) return;
+            if (pos == jj_endpos + 1)
+            {
+                // Extends the current run of recorded tokens.
+                jj_lasttokens[jj_endpos++] = kind;
+            }
+            else if (jj_endpos != 0)
+            {
+                jj_expentry = new int[jj_endpos];
+                for (int i = 0; i < jj_endpos; i++)
+                {
+                    jj_expentry[i] = jj_lasttokens[i];
+                }
+                foreach (var oldentry in jj_expentries)
+                {
+                    if (oldentry.Length == jj_expentry.Length)
+                    {
+                        for (int i = 0; i < jj_expentry.Length; i++)
+                        {
+                            if (oldentry[i] != jj_expentry[i])
+                            {
+                                // Mismatch: try the next recorded entry
+                                // (Java: continue jj_entries_loop).
+                                goto jj_entries_loop_continue;
+                            }
+                        }
+                        // Full match found (Java: break jj_entries_loop).
+                        jj_expentries.Add(jj_expentry);
+                        goto jj_entries_loop_break;
+                    }
+                jj_entries_loop_continue: ;
+                }
+            jj_entries_loop_break:
+                if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
+            }
+        }
+
+        /// <summary>
+        /// Generate ParseException: builds an exception describing the token
+        /// sequences that would have been legal at the current position,
+        /// derived from the jj_la1 generation stamps, the jj_la1_0 bit masks,
+        /// and a rescan of the memoized lookaheads.
+        /// </summary>
+        /// <returns>The exception for the caller to throw.</returns>
+        public virtual ParseException GenerateParseException()
+        {
+            jj_expentries.Clear();
+            bool[] la1tokens = new bool[24];  // one flag per token kind
+            if (jj_kind >= 0)
+            {
+                // The kind Jj_consume_token failed on is always expected.
+                la1tokens[jj_kind] = true;
+                jj_kind = -1;
+            }
+            // Mark the kinds expected at every choice point visited in the
+            // current generation.
+            for (int i = 0; i < 10; i++)
+            {
+                if (jj_la1[i] == jj_gen)
+                {
+                    for (int j = 0; j < 32; j++)
+                    {
+                        if ((jj_la1_0[i] & (1 << j)) != 0)
+                        {
+                            la1tokens[j] = true;
+                        }
+                    }
+                }
+            }
+            for (int i = 0; i < 24; i++)
+            {
+                if (la1tokens[i])
+                {
+                    jj_expentry = new int[1];
+                    jj_expentry[0] = i;
+                    jj_expentries.Add(jj_expentry);
+                }
+            }
+            jj_endpos = 0;
+            // Re-run memoized lookaheads so their tokens are recorded too.
+            Jj_rescan_token();
+            Jj_add_error_token(0, 0);
+            int[][] exptokseq = new int[jj_expentries.Count][];
+            for (int i = 0; i < jj_expentries.Count; i++)
+            {
+                exptokseq[i] = jj_expentries[i];
+            }
+            return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
+        }
+
+        /// <summary>Enable tracing. No-op in this generated parser; kept for
+        /// JavaCC API compatibility.</summary>
+        public void Enable_tracing()
+        {
+        }
+
+        /// <summary>Disable tracing. No-op in this generated parser; kept for
+        /// JavaCC API compatibility.</summary>
+        public void Disable_tracing()
+        {
+        }
+
+        // Re-runs every still-valid memoized lookahead evaluation with
+        // jj_rescan set, so the tokens they scanned are recorded via
+        // Jj_add_error_token for error reporting.
+        private void Jj_rescan_token()
+        {
+            jj_rescan = true;
+            for (int i = 0; i < 1; i++)
+            {
+                try
+                {
+                    JJCalls p = jj_2_rtns[i];
+                    do
+                    {
+                        if (p.gen > jj_gen)
+                        {
+                            jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
+                            switch (i)
+                            {
+                                case 0: Jj_3_1(); break;
+                            }
+                        }
+                        p = p.next;
+                    } while (p != null);
+                }
+                // LookaheadSuccess is pure control flow from the scan
+                // machinery; swallowing it here is intentional. The exception
+                // variable was unused (compiler warning CS0168), so it is no
+                // longer declared.
+                catch (LookaheadSuccess) { }
+            }
+            jj_rescan = false;
+        }
+
+        // Memoizes the outcome of lookahead routine 'index': finds (or
+        // appends) a JJCalls slot whose entry has expired, then records the
+        // generation through which the result stays valid, the token the scan
+        // started at, and the lookahead budget that was used.
+        private void Jj_save(int index, int xla)
+        {
+            JJCalls p = jj_2_rtns[index];
+            while (p.gen > jj_gen)
+            {
+                if (p.next == null) { p = p.next = new JJCalls(); break; }
+                p = p.next;
+            }
+            p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
+        }
+
+        // Linked-list node memoizing one lookahead evaluation:
+        // gen   = generation through which the result stays valid,
+        // first = token the scan started at,
+        // arg   = lookahead budget used,
+        // next  = older entries for the same routine.
+        internal sealed class JJCalls
+        {
+            internal int gen;
+            internal Token first;
+            internal int arg;
+            internal JJCalls next;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs b/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
new file mode 100644
index 0000000..262f76b
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
@@ -0,0 +1,120 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Token kind ids for the Surround query parser; each value indexes the
+    /// corresponding literal in <see cref="QueryParserConstants.TokenImage"/>.
+    /// </summary>
+    public static class RegexpToken
+    {
+        /// <summary>End of File. </summary>
+        public const int EOF = 0;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _NUM_CHAR = 1;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _TERM_CHAR = 2;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _WHITESPACE = 3;
+        /// <summary>RegularExpression Id: "*". </summary>
+        public const int _STAR = 4;
+        /// <summary>RegularExpression Id: "?". </summary>
+        public const int _ONE_CHAR = 5;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _DISTOP_NUM = 6;
+        // Kind 7 has no named constant ("<token of kind 7>" in TokenImage) —
+        // presumably an anonymous/skipped token; confirm against the grammar.
+        /// <summary>RegularExpression Id. </summary>
+        public const int OR = 8;
+        /// <summary>RegularExpression Id. </summary>
+        public const int AND = 9;
+        /// <summary>RegularExpression Id. </summary>
+        public const int NOT = 10;
+        /// <summary>RegularExpression Id. </summary>
+        public const int W = 11;
+        /// <summary>RegularExpression Id. </summary>
+        public const int N = 12;
+        /// <summary>RegularExpression Id: "(". </summary>
+        public const int LPAREN = 13;
+        /// <summary>RegularExpression Id: ")". </summary>
+        public const int RPAREN = 14;
+        /// <summary>RegularExpression Id: ",". </summary>
+        public const int COMMA = 15;
+        /// <summary>RegularExpression Id: ":". </summary>
+        public const int COLON = 16;
+        /// <summary>RegularExpression Id: "^" (boost operator). </summary>
+        public const int CARAT = 17;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TRUNCQUOTED = 18;
+        /// <summary>RegularExpression Id. </summary>
+        public const int QUOTED = 19;
+        /// <summary>RegularExpression Id. </summary>
+        public const int SUFFIXTERM = 20;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TRUNCTERM = 21;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TERM = 22;
+        /// <summary>RegularExpression Id. </summary>
+        public const int NUMBER = 23;
+    }
+
+    /// <summary>
+    /// Lexical state ids for the generated token manager.
+    /// NOTE(review): state 1 has no named constant here — presumably an
+    /// internal state; confirm against QueryParserTokenManager.
+    /// </summary>
+    public static class LexicalToken
+    {
+        /// <summary>Lexical state.</summary>
+        public const int Boost = 0;
+        /// <summary>Lexical state.</summary>
+        public const int DEFAULT = 2;
+    }
+
+    // NOTE: In Java, this was an interface. However, in 
+    // .NET we cannot define constants in an interface.
+    // So, instead we are making it a static class so it 
+    // can be shared between classes with different base classes.
+
+    // public interface QueryParserConstants
+
+    /// <summary> Token literal values and constants.
+    /// Generated by org.javacc.parser.OtherFilesGen#start()
+    /// </summary>
+    /// <summary> Token literal values and constants.
+    /// Generated by org.javacc.parser.OtherFilesGen#start()
+    /// </summary>
+    public static class QueryParserConstants
+    {
+        /// <summary>
+        /// Literal token values, indexed by token kind
+        /// (see <see cref="RegexpToken"/>).
+        /// </summary>
+        // readonly prevents callers from replacing the shared table; the Java
+        // original exposed this as an (implicitly final) interface constant.
+        public static readonly string[] TokenImage = new string[] {
+            "<EOF>",
+            "<_NUM_CHAR>",
+            "<_TERM_CHAR>",
+            "<_WHITESPACE>",
+            "\"*\"",
+            "\"?\"",
+            "<_DISTOP_NUM>",
+            "<token of kind 7>",
+            "<OR>",
+            "<AND>",
+            "<NOT>",
+            "<W>",
+            "<N>",
+            "\"(\"",
+            "\")\"",
+            "\",\"",
+            "\":\"",
+            "\"^\"",
+            "<TRUNCQUOTED>",
+            "<QUOTED>",
+            "<SUFFIXTERM>",
+            "<TRUNCTERM>",
+            "<TERM>",
+            "<NUMBER>"
+        };
+    }
+}
\ No newline at end of file


[45/50] [abbrv] lucenenet git commit: Fixed bug in Surround.QueryParser caused by mistranslation from Java of break

Posted by sy...@apache.org.
Fixed bug in Surround.QueryParser caused by mistranslation from Java of break <label> and continue <label>.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/bd781797
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/bd781797
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/bd781797

Branch: refs/heads/master
Commit: bd781797e28220ea00e9e149b8b6303b720ca980
Parents: 10dc873
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Fri Sep 2 23:16:35 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 23:16:35 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bd781797/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
index 49ef7d4..170eb74 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -542,7 +542,7 @@ namespace Lucene.Net.QueryParser.Surround.Parser
         public void OptionalWeights(SrndQuery q)
         {
             Token weight = null;
-        
+
             while (true)
             {
                 switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
@@ -797,13 +797,15 @@ namespace Lucene.Net.QueryParser.Surround.Parser
                         {
                             if (oldentry[i] != jj_expentry[i])
                             {
-                                continue;
+                                goto jj_entries_loop_continue;
                             }
                         }
                         jj_expentries.Add(jj_expentry);
-                        break;
+                        goto jj_entries_loop_break;
                     }
+                jj_entries_loop_continue: ;
                 }
+            jj_entries_loop_break:
                 if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
             }
         }


[05/50] [abbrv] lucenenet git commit: Marked all IncrementToken() methods sealed in test TokenFilter classes.

Posted by sy...@apache.org.
Marked all IncrementToken() methods sealed in test TokenFilter classes.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/544c6d44
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/544c6d44
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/544c6d44

Branch: refs/heads/master
Commit: 544c6d442032faeab482a0aee23f75381237a203
Parents: c9b96c8
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 19:02:50 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:29:52 2016 +0700

----------------------------------------------------------------------
 Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs | 4 ++--
 Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs   | 2 +-
 Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs  | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/544c6d44/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs b/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
index fc1ce0c..c4f3a7b 100644
--- a/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
@@ -137,7 +137,7 @@ namespace Lucene.Net.QueryParser.Classic
                 typeAtt = AddAttribute<ITypeAttribute>();
             }
 
-            public override bool IncrementToken()
+            public override sealed bool IncrementToken()
             {
                 if (multiToken > 0)
                 {
@@ -210,7 +210,7 @@ namespace Lucene.Net.QueryParser.Classic
                 posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
             }
 
-            public override bool IncrementToken()
+            public override sealed bool IncrementToken()
             {
                 while (input.IncrementToken())
                 {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/544c6d44/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index 35acad2..d0e222d 100644
--- a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -414,7 +414,7 @@ namespace Lucene.Net.QueryParser.Classic
                 IPositionIncrementAttribute posIncAtt = AddAttribute<IPositionIncrementAttribute>();
             }
 
-            public override bool IncrementToken()
+            public override sealed bool IncrementToken()
             {
                 if (addSynonym)
                 { // inject our synonym

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/544c6d44/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
index f8e290e..282b355 100644
--- a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
+++ b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
@@ -61,7 +61,7 @@ namespace Lucene.Net.QueryParser.Util
             bool inPhrase = false;
             int savedStart = 0, savedEnd = 0;
 
-            public override bool IncrementToken()
+            public override sealed bool IncrementToken()
             {
                 if (inPhrase)
                 {
@@ -1343,7 +1343,7 @@ namespace Lucene.Net.QueryParser.Util
                 termAtt = AddAttribute<ICharTermAttribute>();
             }
 
-            public override bool IncrementToken()
+            public override sealed bool IncrementToken()
             {
                 if (input.IncrementToken())
                 {


[20/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
new file mode 100644
index 0000000..0a9d49f
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
@@ -0,0 +1,728 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using Lucene.Net.Util.Automaton;
+using NUnit.Framework;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Simple
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Tests for <see cref="SimpleQueryParser"/>
+    /// </summary>
+    [TestFixture]
+    public class TestSimpleQueryParser : LuceneTestCase
+    {
+        /// <summary>
+        /// helper to parse a query with whitespace+lowercase analyzer across "field",
+        /// with default operator of MUST
+        /// </summary>
+        /// <param name="text">Query text to parse.</param>
+        /// <returns>The parsed <see cref="Query"/>.</returns>
+        private Query Parse(string text)
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            SimpleQueryParser parser = new SimpleQueryParser(analyzer, "field");
+            parser.DefaultOperator = BooleanClause.Occur.MUST;
+            return parser.Parse(text);
+        }
+
+        /// <summary>
+        /// helper to parse a query with whitespace+lowercase analyzer across "field",
+        /// with default operator of MUST
+        /// </summary>
+        /// <param name="text">Query text to parse.</param>
+        /// <param name="flags">Feature flags passed through to the
+        /// <see cref="SimpleQueryParser"/> constructor.</param>
+        /// <returns>The parsed <see cref="Query"/>.</returns>
+        private Query Parse(string text, int flags)
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            SimpleQueryParser parser = new SimpleQueryParser(analyzer, new HashMap<string, float>() { { "field", 1f } }, flags);
+            parser.DefaultOperator = BooleanClause.Occur.MUST;
+            return parser.Parse(text);
+        }
+
+        /// <summary>A single bare term parses to a TermQuery.</summary>
+        [Test]
+        public void TestTerm()
+        {
+            var expected = new TermQuery(new Term("field", "foobar"));
+            assertEquals(expected, Parse("foobar"));
+        }
+
+        /// <summary>Fuzzy operator '~': a valid distance is honored, malformed
+        /// or missing distances fall back to a plain term, and over-limit
+        /// distances are clamped to the maximum supported edit distance.</summary>
+        [Test]
+        public void TestFuzzy()
+        {
+            Query regular = new TermQuery(new Term("field", "foobar"));
+            Query expected = new FuzzyQuery(new Term("field", "foobar"), 2);
+
+            assertEquals(expected, Parse("foobar~2"));
+            // Malformed/missing distances degrade to a plain term query.
+            assertEquals(regular, Parse("foobar~"));
+            assertEquals(regular, Parse("foobar~a"));
+            assertEquals(regular, Parse("foobar~1a"));
+
+            BooleanQuery @bool = new BooleanQuery();
+            FuzzyQuery fuzzy = new FuzzyQuery(new Term("field", "foo"), LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
+            @bool.Add(fuzzy, BooleanClause.Occur.MUST);
+            @bool.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.MUST);
+
+            // String concatenation appends "1" after the max distance digit
+            // (e.g. "foo~21 bar"), an over-limit distance; the expected query
+            // shows the parser clamps it to the supported maximum.
+            assertEquals(@bool, Parse("foo~" + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE + 1 + " bar"));
+        }
+
+        /// <summary>A quoted phrase parses to a PhraseQuery over its terms.</summary>
+        [Test]
+        public void TestPhrase()
+        {
+            var expected = new PhraseQuery();
+            expected.Add(new Term("field", "foo"));
+            expected.Add(new Term("field", "bar"));
+            assertEquals(expected, Parse("\"foo bar\""));
+        }
+
+        /// <summary>Phrase slop '~N': single- and multi-digit slop is applied;
+        /// missing, non-numeric, or negative slop is ignored; slop binds to the
+        /// phrase when followed by further terms.</summary>
+        [Test]
+        public void TestPhraseWithSlop()
+        {
+            PhraseQuery expectedWithSlop = new PhraseQuery();
+            expectedWithSlop.Add(new Term("field", "foo"));
+            expectedWithSlop.Add(new Term("field", "bar"));
+            expectedWithSlop.Slop = (2);
+
+            assertEquals(expectedWithSlop, Parse("\"foo bar\"~2"));
+
+            PhraseQuery expectedWithMultiDigitSlop = new PhraseQuery();
+            expectedWithMultiDigitSlop.Add(new Term("field", "foo"));
+            expectedWithMultiDigitSlop.Add(new Term("field", "bar"));
+            expectedWithMultiDigitSlop.Slop = (10);
+
+            assertEquals(expectedWithMultiDigitSlop, Parse("\"foo bar\"~10"));
+
+            // Malformed slop suffixes leave the phrase unchanged.
+            PhraseQuery expectedNoSlop = new PhraseQuery();
+            expectedNoSlop.Add(new Term("field", "foo"));
+            expectedNoSlop.Add(new Term("field", "bar"));
+
+            assertEquals("Ignore trailing tilde with no slop", expectedNoSlop, Parse("\"foo bar\"~"));
+            assertEquals("Ignore non-numeric trailing slop", expectedNoSlop, Parse("\"foo bar\"~a"));
+            assertEquals("Ignore non-numeric trailing slop", expectedNoSlop, Parse("\"foo bar\"~1a"));
+            assertEquals("Ignore negative trailing slop", expectedNoSlop, Parse("\"foo bar\"~-1"));
+
+            // Slop applies to the phrase only; the following term becomes a
+            // separate MUST clause.
+            PhraseQuery pq = new PhraseQuery();
+            pq.Add(new Term("field", "foo"));
+            pq.Add(new Term("field", "bar"));
+            pq.Slop = (12);
+
+            BooleanQuery expectedBoolean = new BooleanQuery();
+            expectedBoolean.Add(pq, BooleanClause.Occur.MUST);
+            expectedBoolean.Add(new TermQuery(new Term("field", "baz")), BooleanClause.Occur.MUST);
+
+            assertEquals(expectedBoolean, Parse("\"foo bar\"~12 baz"));
+        }
+
+        /// <summary>A trailing '*' on a term produces a PrefixQuery.</summary>
+        [Test]
+        public void TestPrefix()
+        {
+            var expected = new PrefixQuery(new Term("field", "foobar"));
+            assertEquals(expected, Parse("foobar*"));
+        }
+
+        /// <summary>'+' joins terms as MUST clauses of a BooleanQuery.</summary>
+        [Test]
+        public void TestAND()
+        {
+            var expected = new BooleanQuery();
+            foreach (var t in new[] { "foo", "bar" })
+                expected.Add(new TermQuery(new Term("field", t)), BooleanClause.Occur.MUST);
+            assertEquals(expected, Parse("foo+bar"));
+        }
+
+        /// <summary>'+' between quoted phrases joins the two PhraseQueries as
+        /// MUST clauses of a BooleanQuery.</summary>
+        [Test]
+        public void TestANDPhrase()
+        {
+            PhraseQuery phrase1 = new PhraseQuery();
+            phrase1.Add(new Term("field", "foo"));
+            phrase1.Add(new Term("field", "bar"));
+            PhraseQuery phrase2 = new PhraseQuery();
+            phrase2.Add(new Term("field", "star"));
+            phrase2.Add(new Term("field", "wars"));
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(phrase1, BooleanClause.Occur.MUST);
+            expected.Add(phrase2, BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("\"foo bar\"+\"star wars\""));
+        }
+
+        /// <summary>With the helper's MUST default operator, whitespace between
+        /// terms means AND.</summary>
+        [Test]
+        public void TestANDImplicit()
+        {
+            var expected = new BooleanQuery();
+            foreach (var t in new[] { "foo", "bar" })
+                expected.Add(new TermQuery(new Term("field", t)), BooleanClause.Occur.MUST);
+            assertEquals(expected, Parse("foo bar"));
+        }
+
+        /// <summary>'|' (single or doubled) combines terms as SHOULD clauses.</summary>
+        [Test]
+        public void TestOR()
+        {
+            var expected = new BooleanQuery();
+            foreach (var t in new[] { "foo", "bar" })
+                expected.Add(new TermQuery(new Term("field", t)), BooleanClause.Occur.SHOULD);
+            assertEquals(expected, Parse("foo|bar"));
+            assertEquals(expected, Parse("foo||bar"));
+        }
+
+        /// <summary>A parser whose DefaultOperator is left unset treats
+        /// whitespace-separated terms as SHOULD clauses (OR is the default).</summary>
+        [Test]
+        public void TestORImplicit()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
+
+            // Bypasses the Parse() helper so DefaultOperator is not forced to MUST.
+            SimpleQueryParser parser = new SimpleQueryParser(new MockAnalyzer(Random()), "field");
+            assertEquals(expected, parser.Parse("foo bar"));
+        }
+
+        /// <summary>'|' between quoted phrases joins the two PhraseQueries as
+        /// SHOULD clauses of a BooleanQuery.</summary>
+        [Test]
+        public void TestORPhrase()
+        {
+            PhraseQuery phrase1 = new PhraseQuery();
+            phrase1.Add(new Term("field", "foo"));
+            phrase1.Add(new Term("field", "bar"));
+            PhraseQuery phrase2 = new PhraseQuery();
+            phrase2.Add(new Term("field", "star"));
+            phrase2.Add(new Term("field", "wars"));
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(phrase1, BooleanClause.Occur.SHOULD);
+            expected.Add(phrase2, BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("\"foo bar\"|\"star wars\""));
+        }

        /** test negated term */
        [Test]
        public void TestNOT()
        {
            // A lone negation is paired with MatchAllDocsQuery so the result set is well defined.
            BooleanQuery expected = new BooleanQuery();
            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST_NOT);
            expected.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);

            assertEquals(expected, Parse("-foo"));
            assertEquals(expected, Parse("-(foo)"));
            // Repeated '-' operators collapse into a single negation.
            assertEquals(expected, Parse("---foo"));
        }

        /** test crazy prefixes with multiple asterisks */
        [Test]
        public void TestCrazyPrefixes1()
        {
            // Only the trailing '*' acts as the prefix operator; the embedded one stays in the term.
            Query expected = new PrefixQuery(new Term("field", "st*ar"));

            assertEquals(expected, Parse("st*ar*"));
        }

        /** test prefixes with some escaping */
        [Test]
        public void TestCrazyPrefixes2()
        {
            // The escaped backslash survives as a literal in the term text.
            Query expected = new PrefixQuery(new Term("field", "st*ar\\*"));

            assertEquals(expected, Parse("st*ar\\\\**"));
        }

        /** not a prefix query! the prefix operator is escaped */
        [Test]
        public void TestTermInDisguise()
        {
            Query expected = new TermQuery(new Term("field", "st*ar\\*"));

            assertEquals(expected, Parse("sT*Ar\\\\\\*"));
        }

        // a number of test cases here have garbage/errors in
        // the syntax passed in to test that the query can
        // still be interpreted as a guess to what the human
        // input was trying to be

        [Test]
        public void TestGarbageTerm()
        {
            // Stray whitespace, unbalanced parens/quotes and dangling operators are all ignored.
            Query expected = new TermQuery(new Term("field", "star"));

            assertEquals(expected, Parse("star"));
            assertEquals(expected, Parse("star\n"));
            assertEquals(expected, Parse("star\r"));
            assertEquals(expected, Parse("star\t"));
            assertEquals(expected, Parse("star("));
            assertEquals(expected, Parse("star)"));
            assertEquals(expected, Parse("star\""));
            assertEquals(expected, Parse("\t \r\n\nstar   \n \r \t "));
            assertEquals(expected, Parse("- + \"\" - star \\"));
        }

        [Test]
        public void TestGarbageEmpty()
        {
            // Inputs that contain no indexable term must parse to null rather than throw.
            assertNull(Parse(""));
            // NOTE(review): the next two inputs look byte-identical; the Java original may
            // have used a different (non-breaking) whitespace char here — verify upstream.
            assertNull(Parse("  "));
            assertNull(Parse("  "));
            assertNull(Parse("\\ "));
            assertNull(Parse("\\ \\ "));
            assertNull(Parse("\"\""));
            assertNull(Parse("\" \""));
            assertNull(Parse("\" \"|\" \""));
            assertNull(Parse("(\" \"|\" \")"));
            assertNull(Parse("\" \" \" \""));
            assertNull(Parse("(\" \" \" \")"));
        }

        [Test]
        public void TestGarbageAND()
        {
            // '+' and plain whitespace both mean AND; repeated/dangling operators are tolerated.
            BooleanQuery expected = new BooleanQuery();
            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);

            assertEquals(expected, Parse("star wars"));
            assertEquals(expected, Parse("star+wars"));
            assertEquals(expected, Parse("     star     wars   "));
            assertEquals(expected, Parse("     star +    wars   "));
            assertEquals(expected, Parse("  |     star + + |   wars   "));
            assertEquals(expected, Parse("  |     star + + |   wars   \\"));
        }

        [Test]
        public void TestGarbageOR()
        {
            // '|' means OR; operator noise around the terms is ignored.
            BooleanQuery expected = new BooleanQuery();
            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);

            assertEquals(expected, Parse("star|wars"));
            assertEquals(expected, Parse("     star |    wars   "));
            assertEquals(expected, Parse("  |     star | + |   wars   "));
            assertEquals(expected, Parse("  +     star | + +   wars   \\"));
        }

        [Test]
        public void TestGarbageNOT()
        {
            BooleanQuery expected = new BooleanQuery();
            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST_NOT);
            expected.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);

            assertEquals(expected, Parse("-star"));
            assertEquals(expected, Parse("---star"));
            assertEquals(expected, Parse("- -star -"));
        }

        [Test]
        public void TestGarbagePhrase()
        {
            // Empty quoted strings and trailing escapes around a phrase are discarded.
            PhraseQuery expected = new PhraseQuery();
            expected.Add(new Term("field", "star"));
            expected.Add(new Term("field", "wars"));

            assertEquals(expected, Parse("\"star wars\""));
            assertEquals(expected, Parse("\"star wars\\ \""));
            assertEquals(expected, Parse("\"\" | \"star wars\""));
            assertEquals(expected, Parse("          \"star wars\"        \"\"\\"));
        }

        [Test]
        public void TestGarbageSubquery()
        {
            // Unbalanced or empty parentheses never change the single-term result.
            Query expected = new TermQuery(new Term("field", "star"));

            assertEquals(expected, Parse("(star)"));
            assertEquals(expected, Parse("(star))"));
            assertEquals(expected, Parse("((star)"));
            assertEquals(expected, Parse("     -()(star)        \n\n\r     "));
            assertEquals(expected, Parse("| + - ( + - |      star    \n      ) \n"));
        }

        [Test]
        public void TestCompoundAnd()
        {
            BooleanQuery expected = new BooleanQuery();
            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.MUST);

            assertEquals(expected, Parse("star wars empire"));
            assertEquals(expected, Parse("star+wars + empire"));
            assertEquals(expected, Parse(" | --star wars empire \n\\"));
        }

        [Test]
        public void TestCompoundOr()
        {
            BooleanQuery expected = new BooleanQuery();
            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);

            assertEquals(expected, Parse("star|wars|empire"));
            assertEquals(expected, Parse("star|wars | empire"));
            assertEquals(expected, Parse(" | --star|wars|empire \n\\"));
        }

        [Test]
        public void TestComplex00()
        {
            // (star OR wars) AND empire — AND binds the OR group to the last term.
            BooleanQuery expected = new BooleanQuery();
            BooleanQuery inner = new BooleanQuery();
            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
            expected.Add(inner, BooleanClause.Occur.MUST);
            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.MUST);

            assertEquals(expected, Parse("star|wars empire"));
            assertEquals(expected, Parse("star|wars + empire"));
            assertEquals(expected, Parse("star| + wars + ----empire |"));
        }

        [Test]
        public void TestComplex01()
        {
            // (star AND wars) OR empire.
            BooleanQuery expected = new BooleanQuery();
            BooleanQuery inner = new BooleanQuery();
            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
            expected.Add(inner, BooleanClause.Occur.SHOULD);
            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);

            assertEquals(expected, Parse("star wars | empire"));
            assertEquals(expected, Parse("star + wars|empire"));
            assertEquals(expected, Parse("star + | wars | ----empire +"));
        }

        [Test]
        public void TestComplex02()
        {
            // (star AND wars) OR empire OR strikes.
            BooleanQuery expected = new BooleanQuery();
            BooleanQuery inner = new BooleanQuery();
            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
            expected.Add(inner, BooleanClause.Occur.SHOULD);
            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
            expected.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);

            assertEquals(expected, Parse("star wars | empire | strikes"));
            assertEquals(expected, Parse("star + wars|empire | strikes"));
            assertEquals(expected, Parse("star + | wars | ----empire | + --strikes \\"));
        }

        [Test]
        public void TestComplex03()
        {
            // ((star AND wars) OR empire OR strikes) AND back.
            BooleanQuery expected = new BooleanQuery();
            BooleanQuery inner = new BooleanQuery();
            BooleanQuery inner2 = new BooleanQuery();
            inner2.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
            inner2.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
            inner.Add(inner2, BooleanClause.Occur.SHOULD);
            inner.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
            inner.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);
            expected.Add(inner, BooleanClause.Occur.MUST);
            expected.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);

            assertEquals(expected, Parse("star wars | empire | strikes back"));
            assertEquals(expected, Parse("star + wars|empire | strikes + back"));
            assertEquals(expected, Parse("star + | wars | ----empire | + --strikes + | --back \\"));
        }

        [Test]
        public void TestComplex04()
        {
            // (star AND wars) OR empire OR (strikes AND back) — parentheses force the grouping.
            BooleanQuery expected = new BooleanQuery();
            BooleanQuery inner = new BooleanQuery();
            BooleanQuery inner2 = new BooleanQuery();
            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
            inner2.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.MUST);
            inner2.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
            expected.Add(inner, BooleanClause.Occur.SHOULD);
            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
            expected.Add(inner2, BooleanClause.Occur.SHOULD);

            assertEquals(expected, Parse("(star wars) | empire | (strikes back)"));
            assertEquals(expected, Parse("(star + wars) |empire | (strikes + back)"));
            assertEquals(expected, Parse("(star + | wars |) | ----empire | + --(strikes + | --back) \\"));
        }

        [Test]
        public void TestComplex05()
        {
            // (star AND wars) OR (empire OR (strikes AND back AND NOT jarjar)).
            BooleanQuery expected = new BooleanQuery();
            BooleanQuery inner1 = new BooleanQuery();
            BooleanQuery inner2 = new BooleanQuery();
            BooleanQuery inner3 = new BooleanQuery();
            BooleanQuery inner4 = new BooleanQuery();

            expected.Add(inner1, BooleanClause.Occur.SHOULD);
            expected.Add(inner2, BooleanClause.Occur.SHOULD);

            inner1.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
            inner1.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);

            inner2.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
            inner2.Add(inner3, BooleanClause.Occur.SHOULD);

            inner3.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.MUST);
            inner3.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
            inner3.Add(inner4, BooleanClause.Occur.MUST);

            // The negation pairs with MatchAllDocsQuery, as in TestNOT.
            inner4.Add(new TermQuery(new Term("field", "jarjar")), BooleanClause.Occur.MUST_NOT);
            inner4.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);

            assertEquals(expected, Parse("(star wars) | (empire | (strikes back -jarjar))"));
            assertEquals(expected, Parse("(star + wars) |(empire | (strikes + back -jarjar) () )"));
            assertEquals(expected, Parse("(star + | wars |) | --(--empire | + --(strikes + | --back + -jarjar) \"\" ) \""));
        }

        [Test]
        public void TestComplex06()
        {
            // star AND (wars OR ((empire OR strikes) AND back AND "jar+|jar")) — the
            // escaped operators inside jar\+\|jar become literal term characters.
            BooleanQuery expected = new BooleanQuery();
            BooleanQuery inner1 = new BooleanQuery();
            BooleanQuery inner2 = new BooleanQuery();
            BooleanQuery inner3 = new BooleanQuery();

            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
            expected.Add(inner1, BooleanClause.Occur.MUST);

            inner1.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
            inner1.Add(inner2, BooleanClause.Occur.SHOULD);

            inner2.Add(inner3, BooleanClause.Occur.MUST);
            inner3.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
            inner3.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);
            inner2.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
            inner2.Add(new TermQuery(new Term("field", "jar+|jar")), BooleanClause.Occur.MUST);

            assertEquals(expected, Parse("star (wars | (empire | strikes back jar\\+\\|jar))"));
            assertEquals(expected, Parse("star + (wars |(empire | strikes + back jar\\+\\|jar) () )"));
            assertEquals(expected, Parse("star + (| wars | | --(--empire | + --strikes + | --back + jar\\+\\|jar) \"\" ) \""));
        }
+
+        /** test a term with field weights */
+        [Test]
+        public void TestWeightedTerm()
+        {
+            IDictionary<string, float> weights = new Dictionary<string, float>();
+            weights["field0"] = 5f;
+            weights["field1"] = 10f;
+
+            BooleanQuery expected = new BooleanQuery(true);
+            Query field0 = new TermQuery(new Term("field0", "foo"));
+            field0.Boost = (5f);
+            expected.Add(field0, BooleanClause.Occur.SHOULD);
+            Query field1 = new TermQuery(new Term("field1", "foo"));
+            field1.Boost = (10f);
+            expected.Add(field1, BooleanClause.Occur.SHOULD);
+
+            Analyzer analyzer = new MockAnalyzer(Random());
+            SimpleQueryParser parser = new SimpleQueryParser(analyzer, weights);
+            assertEquals(expected, parser.Parse("foo"));
+        }
+
+        /** test a more complex query with field weights */
+        [Test]
+        public void testWeightedOR()
+        {
+            IDictionary<string, float> weights = new Dictionary<string, float>();
+            weights["field0"] = 5f;
+            weights["field1"] = 10f;
+
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery foo = new BooleanQuery(true);
+            Query field0 = new TermQuery(new Term("field0", "foo"));
+            field0.Boost = (5f);
+            foo.Add(field0, BooleanClause.Occur.SHOULD);
+            Query field1 = new TermQuery(new Term("field1", "foo"));
+            field1.Boost = (10f);
+            foo.Add(field1, BooleanClause.Occur.SHOULD);
+            expected.Add(foo, BooleanClause.Occur.SHOULD);
+
+            BooleanQuery bar = new BooleanQuery(true);
+            field0 = new TermQuery(new Term("field0", "bar"));
+            field0.Boost = (5f);
+            bar.Add(field0, BooleanClause.Occur.SHOULD);
+            field1 = new TermQuery(new Term("field1", "bar"));
+            field1.Boost = (10f);
+            bar.Add(field1, BooleanClause.Occur.SHOULD);
+            expected.Add(bar, BooleanClause.Occur.SHOULD);
+
+            Analyzer analyzer = new MockAnalyzer(Random());
+            SimpleQueryParser parser = new SimpleQueryParser(analyzer, weights);
+            assertEquals(expected, parser.Parse("foo|bar"));
+        }

        /** helper to parse a query with keyword analyzer across "field" */
        private Query ParseKeyword(string text, int flags)
        {
            // The keyword tokenizer keeps the whole input as a single token, so any
            // operator characters left over after parsing appear verbatim in the term.
            Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false);
            SimpleQueryParser parser = new SimpleQueryParser(analyzer,
                new HashMap<string, float>() { { "field", 1f } },
                flags);
            return parser.Parse(text);
        }
+
+        /** test the ability to enable/disable phrase operator */
+        [Test]
+        public void TestDisablePhrase()
+        {
+            Query expected = new TermQuery(new Term("field", "\"test\""));
+            assertEquals(expected, ParseKeyword("\"test\"", SimpleQueryParser.PHRASE_OPERATOR));
+        }
+
+        /** test the ability to enable/disable prefix operator */
+        [Test]
+        public void TestDisablePrefix()
+        {
+            Query expected = new TermQuery(new Term("field", "test*"));
+            assertEquals(expected, ParseKeyword("test*", SimpleQueryParser.PREFIX_OPERATOR));
+        }
+
+        /** test the ability to enable/disable AND operator */
+        [Test]
+        public void TestDisableAND()
+        {
+            Query expected = new TermQuery(new Term("field", "foo+bar"));
+            assertEquals(expected, ParseKeyword("foo+bar", SimpleQueryParser.AND_OPERATOR));
+            expected = new TermQuery(new Term("field", "+foo+bar"));
+            assertEquals(expected, ParseKeyword("+foo+bar", SimpleQueryParser.AND_OPERATOR));
+        }
+
+        /** test the ability to enable/disable OR operator */
+        [Test]
+        public void TestDisableOR()
+        {
+            Query expected = new TermQuery(new Term("field", "foo|bar"));
+            assertEquals(expected, ParseKeyword("foo|bar", SimpleQueryParser.OR_OPERATOR));
+            expected = new TermQuery(new Term("field", "|foo|bar"));
+            assertEquals(expected, ParseKeyword("|foo|bar", SimpleQueryParser.OR_OPERATOR));
+        }
+
+        /** test the ability to enable/disable NOT operator */
+        [Test]
+        public void TestDisableNOT()
+        {
+            Query expected = new TermQuery(new Term("field", "-foo"));
+            assertEquals(expected, ParseKeyword("-foo", SimpleQueryParser.NOT_OPERATOR));
+        }
+
+        /** test the ability to enable/disable precedence operators */
+        [Test]
+        public void TestDisablePrecedence()
+        {
+            Query expected = new TermQuery(new Term("field", "(foo)"));
+            assertEquals(expected, ParseKeyword("(foo)", SimpleQueryParser.PRECEDENCE_OPERATORS));
+            expected = new TermQuery(new Term("field", ")foo("));
+            assertEquals(expected, ParseKeyword(")foo(", SimpleQueryParser.PRECEDENCE_OPERATORS));
+        }
+
+        /** test the ability to enable/disable escape operators */
+        [Test]
+        public void TestDisableEscape()
+        {
+            Query expected = new TermQuery(new Term("field", "foo\\bar"));
+            assertEquals(expected, ParseKeyword("foo\\bar", SimpleQueryParser.ESCAPE_OPERATOR));
+            assertEquals(expected, ParseKeyword("(foo\\bar)", SimpleQueryParser.ESCAPE_OPERATOR));
+            assertEquals(expected, ParseKeyword("\"foo\\bar\"", SimpleQueryParser.ESCAPE_OPERATOR));
+        }
+
+        [Test]
+        public void TestDisableWhitespace()
+        {
+            Query expected = new TermQuery(new Term("field", "foo foo"));
+            assertEquals(expected, ParseKeyword("foo foo", SimpleQueryParser.WHITESPACE_OPERATOR));
+            expected = new TermQuery(new Term("field", " foo foo\n "));
+            assertEquals(expected, ParseKeyword(" foo foo\n ", SimpleQueryParser.WHITESPACE_OPERATOR));
+            expected = new TermQuery(new Term("field", "\t\tfoo foo foo"));
+            assertEquals(expected, ParseKeyword("\t\tfoo foo foo", SimpleQueryParser.WHITESPACE_OPERATOR));
+        }
+
+        [Test]
+        public void TestDisableFuzziness()
+        {
+            Query expected = new TermQuery(new Term("field", "foo~1"));
+            assertEquals(expected, ParseKeyword("foo~1", SimpleQueryParser.FUZZY_OPERATOR));
+        }
+
+        [Test]
+        public void TestDisableSlop()
+        {
+            PhraseQuery expectedPhrase = new PhraseQuery();
+            expectedPhrase.Add(new Term("field", "foo"));
+            expectedPhrase.Add(new Term("field", "bar"));
+
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(expectedPhrase, BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "~2")), BooleanClause.Occur.MUST);
+            assertEquals(expected, Parse("\"foo bar\"~2", SimpleQueryParser.NEAR_OPERATOR));
+        }
+
        // we aren't supposed to barf on any input...
        [Test]
        public void TestRandomQueries()
        {
            // Fuzz both entry points with random unicode and random flag masks;
            // the only assertion is that parsing never throws.
            for (int i = 0; i < 1000; i++)
            {
                string query = TestUtil.RandomUnicodeString(Random());
                Parse(query); // no exception
                ParseKeyword(query, TestUtil.NextInt(Random(), 0, 1024)); // no exception
            }
        }
+
+        [Test]
+        public void testRandomQueries2()
+        {
+            char[] chars = new char[] { 'a', '1', '|', '&', ' ', '(', ')', '"', '-', '~' };
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < 1000; i++)
+            {
+                sb.Length = (0);
+                int queryLength = Random().Next(20);
+                for (int j = 0; j < queryLength; j++)
+                {
+                    sb.append(chars[Random().Next(chars.Length)]);
+                }
+                Parse(sb.toString()); // no exception
+                ParseKeyword(sb.toString(), TestUtil.NextInt(Random(), 0, 1024)); // no exception
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
new file mode 100644
index 0000000..6f7fcfc
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
@@ -0,0 +1,142 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
    /// <summary>
    /// Runs a single surround query against a test index and verifies that
    /// exactly the expected document numbers are returned as hits.
    /// </summary>
    public class BooleanQueryTst
    {
        private string queryText;                 // surround query syntax to parse
        private readonly int[] expectedDocNrs;    // global doc nrs that must match
        private SingleFieldTestDb dBase;          // index the query is run against
        private string fieldName;                 // field the query is applied to
        private Assert testCase;                  // NOTE(review): stored but never read in this class — confirm it can be removed
        private BasicQueryFactory qf;
        private bool verbose = true;              // console tracing in DoTest()

        public BooleanQueryTst(
            string queryText,
            int[] expectedDocNrs,
            SingleFieldTestDb dBase,
            string fieldName,
            Assert testCase,
            BasicQueryFactory qf)
        {
            this.queryText = queryText;
            this.expectedDocNrs = expectedDocNrs;
            this.dBase = dBase;
            this.fieldName = fieldName;
            this.testCase = testCase;
            this.qf = qf;
        }

        // Write-only switch toggling the console output of DoTest().
        public virtual bool Verbose { set { this.verbose = value; } }

        public virtual string QueryText { get { return this.queryText; } }

        public virtual int[] ExpectedDocNrs { get { return this.expectedDocNrs; } }

        // Collector asserting that every collected doc is one of the expected ones,
        // each expected doc is matched at most once, and no extra hits occur.
        internal class TestCollector : Collector
        { // FIXME: use check hits from Lucene tests
            private int totalMatched;       // hits seen so far
            private bool[] encountered;     // parallel to parent.expectedDocNrs: which were seen
            private Scorer scorer = null;
            private int docBase = 0;        // doc id offset of the current reader segment
            private BooleanQueryTst parent;

            public TestCollector(BooleanQueryTst parent)
            {
                totalMatched = 0;
                encountered = new bool[parent.expectedDocNrs.Length];
                this.parent = parent;
            }

            public override Scorer Scorer
            {
                set { this.scorer = value; }
            }

            public override bool AcceptsDocsOutOfOrder()
            {
                return true;
            }

            public override AtomicReaderContext NextReader
            {
                set { docBase = value.DocBase; }
            }

            public override void Collect(int docNr)
            {
                float score = scorer.Score();
                // Translate the segment-relative doc id to a global one.
                docNr += docBase;
                /* System.out.println(docNr + " '" + dBase.getDocs()[docNr] + "': " + score); */
                Assert.True(score > 0.0, parent.QueryText + ": positive score");
                Assert.True(totalMatched < parent.ExpectedDocNrs.Length, parent.QueryText + ": too many hits");
                int i;
                // Claim the first not-yet-encountered expected doc nr equal to this hit.
                for (i = 0; i < parent.expectedDocNrs.Length; i++)
                {
                    if ((!encountered[i]) && (parent.ExpectedDocNrs[i] == docNr))
                    {
                        encountered[i] = true;
                        break;
                    }
                }
                // Loop ran off the end: the hit matched no unclaimed expected doc nr.
                if (i == parent.ExpectedDocNrs.Length)
                {
                    Assert.True(false, parent.QueryText + ": doc nr for hit not expected: " + docNr);
                }
                totalMatched++;
            }

            // Call after the search completes to check the total hit count.
            public void CheckNrHits()
            {
                Assert.AreEqual(parent.ExpectedDocNrs.Length, totalMatched, parent.QueryText + ": nr of hits");
            }
        }

        // Parses queryText as a surround query, converts it to a Lucene query on
        // fieldName, runs the search, and verifies hit count and doc numbers.
        public void DoTest()
        {

            if (verbose)
            {
                Console.WriteLine("");
                Console.WriteLine("Query: " + queryText);
            }

            SrndQuery lq = Parser.QueryParser.Parse(queryText);

            /* if (verbose) System.out.println("Srnd: " + lq.toString()); */

            Search.Query query = lq.MakeLuceneQueryField(fieldName, qf);
            /* if (verbose) System.out.println("Lucene: " + query.toString()); */

            TestCollector tc = new TestCollector(this);
            using (IndexReader reader = DirectoryReader.Open(dBase.Db))
            {
                IndexSearcher searcher = new IndexSearcher(reader);

                searcher.Search(query, tc);
            }
            tc.CheckNrHits();
        }
    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
new file mode 100644
index 0000000..7468ef9
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
@@ -0,0 +1,76 @@
+\ufeffusing Lucene.Net.QueryParser.Surround.Parser;
+using System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public class ExceptionQueryTst
+    {
+        private string queryText;
+        private bool verbose;
+
+        public ExceptionQueryTst(string queryText, bool verbose)
+        {
+            this.queryText = queryText;
+            this.verbose = verbose;
+        }
+
+        public void DoTest(StringBuilder failQueries)
+        {
+            bool pass = false;
+            SrndQuery lq = null;
+            try
+            {
+                lq = Parser.QueryParser.Parse(queryText);
+                if (verbose)
+                {
+                    Console.WriteLine("Query: " + queryText + "\nParsed as: " + lq.ToString());
+                }
+            }
+            catch (ParseException e)
+            {
+                if (verbose)
+                {
+                    Console.WriteLine("Parse exception for query:\n"
+                                      + queryText + "\n"
+                                      + e.Message);
+                }
+                pass = true;
+            }
+            if (!pass)
+            {
+                failQueries.append(queryText);
+                failQueries.append("\nParsed as: ");
+                failQueries.append(lq.toString());
+                failQueries.append("\n");
+            }
+        }
+
+        public static string GetFailQueries(string[] exceptionQueries, bool verbose)
+        {
+            StringBuilder failQueries = new StringBuilder();
+            for (int i = 0; i < exceptionQueries.Length; i++)
+            {
+                new ExceptionQueryTst(exceptionQueries[i], verbose).DoTest(failQueries);
+            }
+            return failQueries.toString();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
new file mode 100644
index 0000000..1221835
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
@@ -0,0 +1,55 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public class SingleFieldTestDb
+    {
+        private Directory db;
+        private string[] docs;
+        private string fieldName;
+
+        public SingleFieldTestDb(Random random, string[] documents, string fName)
+        {
+            db = new MockDirectoryWrapper(random, new RAMDirectory());
+            docs = documents;
+            fieldName = fName;
+            using (IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(
+                LuceneVersion.LUCENE_CURRENT,
+                new MockAnalyzer(random))))
+            {
+                for (int j = 0; j < docs.Length; j++)
+                {
+                    Document d = new Document();
+                    d.Add(new TextField(fieldName, docs[j], Field.Store.NO));
+                    writer.AddDocument(d);
+                }
+            }
+        }
+
+        public Directory Db { get { return db; } }
+        public string[] Docs { get { return docs; } }
+        public string Fieldname { get { return fieldName; } }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
new file mode 100644
index 0000000..ebe7e2b
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
@@ -0,0 +1,48 @@
+\ufeffusing Lucene.Net.Search;
+using Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class SrndQueryTest : LuceneTestCase
+    {
+        private void CheckEqualParsings(string s1, string s2)
+        {
+            string fieldName = "foo";
+            BasicQueryFactory qf = new BasicQueryFactory(16);
+            Search.Query lq1, lq2;
+            lq1 = Parser.QueryParser.Parse(s1).MakeLuceneQueryField(fieldName, qf);
+            lq2 = Parser.QueryParser.Parse(s2).MakeLuceneQueryField(fieldName, qf);
+            QueryUtils.CheckEqual(lq1, lq2);
+        }
+
+        [Test]
+        public void TestHashEquals()
+        {
+            //grab some sample queries from Test02Boolean and Test03Distance and
+            //check their hashes and equals
+            CheckEqualParsings("word1 w word2", " word1  w  word2 ");
+            CheckEqualParsings("2N(w1,w2,w3)", " 2N(w1, w2 , w3)");
+            CheckEqualParsings("abc?", " abc? ");
+            CheckEqualParsings("w*rd?", " w*rd?");
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
new file mode 100644
index 0000000..6ebc87a
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
@@ -0,0 +1,72 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class Test01Exceptions_ : LuceneTestCase
+    {
+        /** Main for running test case by itself. */
+        //public static void Main(string[] args)
+        //{
+        //    TestRunner.run(new TestSuite(Test01Exceptions.class));
+        //}
+
+        private bool verbose = false; /* to show actual parsing error messages */
+        private readonly string fieldName = "bi";
+
+        string[] exceptionQueries = {
+            "*",
+            "a*",
+            "ab*",
+            "?",
+            "a?",
+            "ab?",
+            "a???b",
+            "a?",
+            "a*b?",
+            "word1 word2",
+            "word2 AND",
+            "word1 OR",
+            "AND(word2)",
+            "AND(word2,)",
+            "AND(word2,word1,)",
+            "OR(word2)",
+            "OR(word2 ,",
+            "OR(word2 , word1 ,)",
+            "xx NOT",
+            "xx (a AND b)",
+            "(a AND b",
+            "a OR b)",
+            "or(word2+ not ord+, and xyz,def)",
+            ""
+        };
+
+        [Test]
+        public void Test01Exceptions()
+        {
+            string m = ExceptionQueryTst.GetFailQueries(exceptionQueries, verbose);
+            if (m.Length > 0)
+            {
+                fail("No ParseException for:\n" + m);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
new file mode 100644
index 0000000..aef9279
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
@@ -0,0 +1,178 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class Test02Boolean : LuceneTestCase
+    {
+        //public static void Main(string[] args) {
+        //    TestRunner.run(new TestSuite(Test02Boolean.class));
+        //}
+
+        private readonly string fieldName = "bi";
+        private bool verbose = false;
+        private int maxBasicQueries = 16;
+
+        string[] docs1 = {
+            "word1 word2 word3",
+            "word4 word5",
+            "ord1 ord2 ord3",
+            "orda1 orda2 orda3 word2 worda3",
+            "a c e a b c"
+        };
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            db1 = new SingleFieldTestDb(Random(), docs1, fieldName);
+        }
+
+        private SingleFieldTestDb db1;
+
+
+        public void NormalTest1(String query, int[] expdnrs)
+        {
+            BooleanQueryTst bqt = new BooleanQueryTst(query, expdnrs, db1, fieldName, this,
+                                                        new BasicQueryFactory(maxBasicQueries));
+            bqt.Verbose = (verbose);
+            bqt.DoTest();
+        }
+
+        [Test]
+        public void Test02Terms01()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("word1", expdnrs);
+        }
+        [Test]
+        public void Test02Terms02()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("word*", expdnrs);
+        }
+        [Test]
+        public void Test02Terms03()
+        {
+            int[] expdnrs = { 2 }; NormalTest1("ord2", expdnrs);
+        }
+        [Test]
+        public void Test02Terms04()
+        {
+            int[] expdnrs = { }; NormalTest1("kxork*", expdnrs);
+        }
+        [Test]
+        public void Test02Terms05()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("wor*", expdnrs);
+        }
+        [Test]
+        public void Test02Terms06()
+        {
+            int[] expdnrs = { }; NormalTest1("ab", expdnrs);
+        }
+
+        [Test]
+        public void Test02Terms10()
+        {
+            int[] expdnrs = { }; NormalTest1("abc?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms13()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("word?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms14()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("w?rd?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms20()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("w*rd?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms21()
+        {
+            int[] expdnrs = { 3 }; NormalTest1("w*rd??", expdnrs);
+        }
+        [Test]
+        public void Test02Terms22()
+        {
+            int[] expdnrs = { 3 }; NormalTest1("w*?da?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms23()
+        {
+            int[] expdnrs = { }; NormalTest1("w?da?", expdnrs);
+        }
+
+        [Test]
+        public void Test03And01()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("word1 AND word2", expdnrs);
+        }
+        [Test]
+        public void Test03And02()
+        {
+            int[] expdnrs = { 3 }; NormalTest1("word* and ord*", expdnrs);
+        }
+        [Test]
+        public void Test03And03()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("and(word1,word2)", expdnrs);
+        }
+        [Test]
+        public void Test04Or01()
+        {
+            int[] expdnrs = { 0, 3 }; NormalTest1("word1 or word2", expdnrs);
+        }
+        [Test]
+        public void Test04Or02()
+        {
+            int[] expdnrs = { 0, 1, 2, 3 }; NormalTest1("word* OR ord*", expdnrs);
+        }
+        [Test]
+        public void Test04Or03()
+        {
+            int[] expdnrs = { 0, 3 }; NormalTest1("OR (word1, word2)", expdnrs);
+        }
+        [Test]
+        public void Test05Not01()
+        {
+            int[] expdnrs = { 3 }; NormalTest1("word2 NOT word1", expdnrs);
+        }
+        [Test]
+        public void Test05Not02()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("word2* not ord*", expdnrs);
+        }
+        [Test]
+        public void Test06AndOr01()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("(word1 or ab)and or(word2,xyz, defg)", expdnrs);
+        }
+        [Test]
+        public void Test07AndOrNot02()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("or( word2* not ord*, and(xyz,def))", expdnrs);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs b/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
new file mode 100644
index 0000000..6a19cb7
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
@@ -0,0 +1,341 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class Test03Distance : LuceneTestCase
+    {
+        //public static void Main(string[] args) {
+        //    TestRunner.run(new TestSuite(Test03Distance.class));
+        //}
+
+        private bool verbose = false;
+        private int maxBasicQueries = 16;
+
+        private string[] exceptionQueries = {
+            "(aa and bb) w cc",
+            "(aa or bb) w (cc and dd)",
+            "(aa opt bb) w cc",
+            "(aa not bb) w cc",
+            "(aa or bb) w (bi:cc)",
+            "(aa or bb) w bi:cc",
+            "(aa or bi:bb) w cc",
+            "(aa or (bi:bb)) w cc",
+            "(aa or (bb and dd)) w cc"
+        };
+
+        [Test]
+        public void Test00Exceptions()
+        {
+            string m = ExceptionQueryTst.GetFailQueries(exceptionQueries, verbose);
+            if (m.Length > 0)
+            {
+                fail("No ParseException for:\n" + m);
+            }
+        }
+
+        private readonly string fieldName = "bi";
+
+        private string[] docs1 = {
+            "word1 word2 word3",
+            "word4 word5",
+            "ord1 ord2 ord3",
+            "orda1 orda2 orda3 word2 worda3",
+            "a c e a b c"
+        };
+
+        SingleFieldTestDb db1;
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            db1 = new SingleFieldTestDb(Random(), docs1, fieldName);
+            db2 = new SingleFieldTestDb(Random(), docs2, fieldName);
+            db3 = new SingleFieldTestDb(Random(), docs3, fieldName);
+        }
+
+        private void DistanceTst(String query, int[] expdnrs, SingleFieldTestDb db)
+        {
+            BooleanQueryTst bqt = new BooleanQueryTst(query, expdnrs, db, fieldName, this,
+                                                        new BasicQueryFactory(maxBasicQueries));
+            bqt.Verbose = (verbose);
+            bqt.DoTest();
+        }
+
+        public void DistanceTest1(string query, int[] expdnrs)
+        {
+            DistanceTst(query, expdnrs, db1);
+        }
+
+        [Test]
+        public void Test0W01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1 w word2", expdnrs);
+        }
+        [Test]
+        public void Test0N01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1 n word2", expdnrs);
+        }
+        [Test]
+        public void Test0N01r()
+        { /* r reverse */
+            int[] expdnrs = { 0 }; DistanceTest1("word2 n word1", expdnrs);
+        }
+        [Test]
+        public void Test0W02()
+        {
+            int[] expdnrs = { }; DistanceTest1("word2 w word1", expdnrs);
+        }
+        [Test]
+        public void Test0W03()
+        {
+            int[] expdnrs = { }; DistanceTest1("word2 2W word1", expdnrs);
+        }
+        [Test]
+        public void Test0N03()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word2 2N word1", expdnrs);
+        }
+        [Test]
+        public void Test0N03r()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1 2N word2", expdnrs);
+        }
+
+        [Test]
+        public void Test0W04()
+        {
+            int[] expdnrs = { }; DistanceTest1("word2 3w word1", expdnrs);
+        }
+
+        [Test]
+        public void Test0N04()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word2 3n word1", expdnrs);
+        }
+        [Test]
+        public void Test0N04r()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1 3n word2", expdnrs);
+        }
+
+        [Test]
+        public void Test0W05()
+        {
+            int[] expdnrs = { }; DistanceTest1("orda1 w orda3", expdnrs);
+        }
+        [Test]
+        public void Test0W06()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("orda1 2w orda3", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1* w word2", expdnrs);
+        }
+        [Test]
+        public void Test1Wtrunc02()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word* w word2", expdnrs);
+        }
+        [Test]
+        public void Test1Wtrunc02r()
+        {
+            int[] expdnrs = { 0, 3 }; DistanceTest1("word2 w word*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc02()
+        {
+            int[] expdnrs = { 0, 3 }; DistanceTest1("word* n word2", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc02r()
+        {
+            int[] expdnrs = { 0, 3 }; DistanceTest1("word2 n word*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc03()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1* w word2*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc03()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1* N word2*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc04()
+        {
+            int[] expdnrs = { }; DistanceTest1("kxork* w kxor*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc04()
+        {
+            int[] expdnrs = { }; DistanceTest1("kxork* 99n kxor*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc05()
+        {
+            int[] expdnrs = { }; DistanceTest1("word2* 2W word1*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc05()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word2* 2N word1*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc06()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("ord* W word*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc06()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("ord* N word*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc06r()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("word* N ord*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc07()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) W word*", expdnrs);
+        }
+        [Test]
+        public void Test1Wtrunc08()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) W (word2 OR worda3)", expdnrs);
+        }
+        [Test]
+        public void Test1Wtrunc09()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) 2W (word2 OR worda3)", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc09()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) 2N (word2 OR worda3)", expdnrs);
+        }
+
+        string[] docs2 = {
+            "w1 w2 w3 w4 w5",
+            "w1 w3 w2 w3",
+            ""
+        };
+
+        SingleFieldTestDb db2;
+
+        public void DistanceTest2(string query, int[] expdnrs)
+        {
+            DistanceTst(query, expdnrs, db2);
+        }
+
+        [Test]
+        public void Test2Wprefix01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest2("W (w1, w2, w3)", expdnrs);
+        }
+        [Test]
+        public void Test2Nprefix01a()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("N(w1, w2, w3)", expdnrs);
+        }
+        [Test]
+        public void Test2Nprefix01b()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("N(w3, w1, w2)", expdnrs);
+        }
+
+        [Test]
+        public void Test2Wprefix02()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("2W(w1,w2,w3)", expdnrs);
+        }
+
+        [Test]
+        public void Test2Nprefix02a()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("2N(w1,w2,w3)", expdnrs);
+        }
+        [Test]
+        public void Test2Nprefix02b()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("2N(w2,w3,w1)", expdnrs);
+        }
+
+        [Test]
+        public void Test2Wnested01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest2("w1 W w2 W w3", expdnrs);
+        }
+        [Test]
+        public void Test2Nnested01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest2("w1 N w2 N w3", expdnrs);
+        }
+
+        [Test]
+        public void Test2Wnested02()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("w1 2W w2 2W w3", expdnrs);
+        }
+        [Test]
+        public void Test2Nnested02()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("w1 2N w2 2N w3", expdnrs);
+        }
+
+        string[] docs3 = {
+            "low pressure temperature inversion and rain",
+            "when the temperature has a negative height above a depression no precipitation gradient is expected",
+            "when the temperature has a negative height gradient above a depression no precipitation is expected",
+            ""
+        };
+
+        SingleFieldTestDb db3;
+
+        public void DistanceTest3(string query, int[] expdnrs)
+        {
+            DistanceTst(query, expdnrs, db3);
+        }
+
+        [Test]
+        public void Test3Example01()
+        {
+            int[] expdnrs = { 0, 2 }; // query does not match doc 1 because "gradient" is in wrong place there.
+            DistanceTest3("50n((low w pressure*) or depression*,"
+                           + "5n(temperat*, (invers* or (negativ* 3n gradient*))),"
+                           + "rain* or precipitat*)",
+                           expdnrs);
+        }
+    }
+}


[39/50] [abbrv] lucenenet git commit: Added missing documentation to SpanNearClauseFactory

Posted by sy...@apache.org.
Added missing documentation to SpanNearClauseFactory


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/2efd9b41
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/2efd9b41
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/2efd9b41

Branch: refs/heads/master
Commit: 2efd9b4104fb9220d73e74b3042fd468ee637171
Parents: 7b7b634
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Tue Aug 2 18:38:01 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:31:05 2016 +0700

----------------------------------------------------------------------
 .../Surround/Query/SpanNearClauseFactory.cs     | 31 ++++++++++++++++++++
 1 file changed, 31 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2efd9b41/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
index 1465462..aa6a1d4 100644
--- a/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
@@ -24,6 +24,37 @@ namespace Lucene.Net.QueryParser.Surround.Query
      */
 
     /// <summary>
+    /// SpanNearClauseFactory:
+    /// 
+    /// Operations:
+    /// 
+    /// - create for a field name and an indexreader.
+    /// 
+    /// - add a weighted Term
+    /// this should add a corresponding SpanTermQuery, or
+    /// increase the weight of an existing one.
+    /// - add a weighted subquery SpanNearQuery
+    /// 
+    /// - create a clause for SpanNearQuery from the things added above.
+    /// For this, create an array of SpanQuery's from the added ones.
+    /// The clause normally is a SpanOrQuery over the added subquery SpanNearQuery
+    /// the SpanTermQuery's for the added Term's
+    /// 
+    /// When it is necessary to suppress double subqueries as much as possible:
+    /// GetHashCode() and Equals() on unweighted SpanQuery are needed (possibly via GetTerms(),
+    /// the terms are individually hashable).
+    /// Idem SpanNearQuery: hash on the subqueries and the slop.
+    /// Evt. merge SpanNearQuery's by adding the weights of the corresponding subqueries.
+    /// 
+    /// To be determined:
+    /// Are SpanQuery weights handled correctly during search by Lucene?
+    /// Should the resulting SpanOrQuery be sorted?
+    /// Could other SpanQueries be added for use in this factory:
+    /// - SpanOrQuery: in principle yes, but it only has access to its terms
+    ///                via getTerms(); are the corresponding weights available?
+    /// - SpanFirstQuery: treat similar to subquery SpanNearQuery. (ok?)
+    /// - SpanNotQuery: treat similar to subquery SpanNearQuery. (ok?)
+    /// 
     /// Factory for <see cref="SpanOrQuery"/>
     /// </summary>
     public class SpanNearClauseFactory


[47/50] [abbrv] lucenenet git commit: Fixed string formatting bugs in QueryParser.Analyzing.AnalyzingQueryParser

Posted by sy...@apache.org.
Fixed string formatting bugs in QueryParser.Analyzing.AnalyzingQueryParser


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/bf635018
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/bf635018
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/bf635018

Branch: refs/heads/master
Commit: bf635018b7eac84c3dd0ca94a76246eea673914e
Parents: 193c348
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 3 00:34:59 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sat Sep 3 00:34:59 2016 +0700

----------------------------------------------------------------------
 .../Analyzing/AnalyzingQueryParser.cs                     | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/bf635018/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
index d61b9d0..a98a26d 100644
--- a/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
@@ -68,12 +68,12 @@ namespace Lucene.Net.QueryParser.Analyzing
             if (termStr == null)
             {
                 //can't imagine this would ever happen
-                throw new ParseException("Passed null value as term to getWildcardQuery");
+                throw new ParseException("Passed null value as term to GetWildcardQuery");
             }
             if (!AllowLeadingWildcard && (termStr.StartsWith("*") || termStr.StartsWith("?")))
             {
                 throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery"
-                                        + " unless getAllowLeadingWildcard() returns true");
+                                        + " unless AllowLeadingWildcard returns true");
             }
 
             Match wildcardMatcher = wildcardPattern.Match(termStr);
@@ -172,7 +172,7 @@ namespace Lucene.Net.QueryParser.Analyzing
                     if (null != multipleOutputs)
                     {
                         throw new ParseException(
-                            string.Format(Locale, "Analyzer created multiple terms for \"%s\": %s", chunk, multipleOutputs.ToString()));
+                            string.Format(Locale, @"Analyzer created multiple terms for ""{0}"": {1}", chunk, multipleOutputs.ToString()));
                     }
                 }
                 else
@@ -180,13 +180,13 @@ namespace Lucene.Net.QueryParser.Analyzing
                     // nothing returned by analyzer.  Was it a stop word and the user accidentally
                     // used an analyzer with stop words?
                     stream.End();
-                    throw new ParseException(string.Format(Locale, "Analyzer returned nothing for \"%s\"", chunk));
+                    throw new ParseException(string.Format(Locale, @"Analyzer returned nothing for ""{0}""", chunk));
                 }
             }
             catch (System.IO.IOException e)
             {
                 throw new ParseException(
-                    string.Format(Locale, "IO error while trying to analyze single term: \"%s\"", termStr));
+                    string.Format(Locale, @"IO error while trying to analyze single term: ""{0}""", termStr));
             }
             finally
             {


[16/50] [abbrv] lucenenet git commit: Ported QueryParser.Surround namespace + tests.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
new file mode 100644
index 0000000..ac3d611
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
@@ -0,0 +1,760 @@
+\ufeffusing System;
+using System.Diagnostics.CodeAnalysis;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Token Manager.
+    /// </summary>
+    public class QueryParserTokenManager //: QueryParserConstants
+    {
+        private void InitBlock()
+        {
+            StreamWriter temp_writer;
+            temp_writer = new StreamWriter(Console.OpenStandardOutput(), Console.Out.Encoding);
+            temp_writer.AutoFlush = true;
+            debugStream = temp_writer;
+        }
+
+        /// <summary>Debug output. </summary>
+        public StreamWriter debugStream;
+        /// <summary>Set debug output. </summary>
+        public virtual void SetDebugStream(StreamWriter ds)
+        {
+            debugStream = ds;
+        }
+        private int JjStopStringLiteralDfa_1(int pos, long active0)
+        {
+            switch (pos)
+            {
+                default:
+                    return -1;
+            }
+        }
        // Hands control from string-literal matching over to the NFA simulation
        // for lexical state 1, continuing at the next input position.
        private int JjStartNfa_1(int pos, long active0)
        {
            return JjMoveNfa_1(JjStopStringLiteralDfa_1(pos, active0), pos + 1);
        }
        // Records a definitive match of token kind <c>kind</c> ending at input
        // position <c>pos</c> and returns the number of characters consumed (pos + 1).
        private int JjStopAtPos(int pos, int kind)
        {
            jjmatchedKind = kind;
            jjmatchedPos = pos;
            return pos + 1;
        }
+        private int jjMoveStringLiteralDfa0_1()
+        {
+            switch (curChar)
+            {
+                case (char)40:
+                    return JjStopAtPos(0, 13);
+                case (char)41:
+                    return JjStopAtPos(0, 14);
+                case (char)44:
+                    return JjStopAtPos(0, 15);
+                case (char)58:
+                    return JjStopAtPos(0, 16);
+                case (char)94:
+                    return JjStopAtPos(0, 17);
+                default:
+                    return JjMoveNfa_1(0, 0);
+            }
+        }
        // Character-class bit vectors consulted by JjCanMove_0 for characters
        // above 0x7F: jjbitVec0 accepts every character except U+0000 in pages
        // with hiByte != 0; jjbitVec2 accepts only 0x80-0xFF of the low page.
        internal static readonly ulong[] jjbitVec0 = {
            0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
        };
        internal static readonly ulong[] jjbitVec2 = {
            0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
        };
        /// <summary>
        /// Runs the generated NFA for lexical state 1 ("DEFAULT") starting at
        /// <paramref name="startState"/>. Records the longest match found in
        /// jjmatchedKind/jjmatchedPos and returns the number of characters read.
        /// Generated by JavaCC: the hexadecimal constants are 64-bit
        /// character-class bitmasks and the case labels are NFA state numbers.
        /// </summary>
        private int JjMoveNfa_1(int startState, int curPos)
        {
            int startsAt = 0;
            jjnewStateCnt = 38;
            int i = 1;
            jjstateSet[0] = startState;
            int kind = 0x7fffffff;
            for (; ; )
            {
                if (++jjround == 0x7fffffff)
                    ReInitRounds();
                // Characters 0-63: transition on a one-bit-per-character mask.
                if (curChar < 64)
                {
                    ulong l = (ulong)(1L << (int)curChar);
                    do
                    {
                        switch (jjstateSet[--i])
                        {
                            case 0:
                                if ((0x7bffe8faffffd9ffL & l) != 0L)
                                {
                                    if (kind > 22)
                                        kind = 22;
                                    JjCheckNAddStates(0, 4);
                                }
                                else if ((0x100002600L & l) != 0L)
                                {
                                    // whitespace (space, \t, \n, \f, \r) -> skip kind 7
                                    if (kind > 7)
                                        kind = 7;
                                }
                                else if (curChar == 34)
                                    JjCheckNAddStates(5, 7);
                                if ((0x3fc000000000000L & l) != 0L)
                                    JjCheckNAddStates(8, 11);
                                else if (curChar == 49)
                                    JjCheckNAddTwoStates(20, 21);
                                break;
                            case 19:
                                if ((0x3fc000000000000L & l) != 0L)
                                    JjCheckNAddStates(8, 11);
                                break;
                            case 20:
                                if ((0x3ff000000000000L & l) != 0L)
                                    JjCheckNAdd(17);
                                break;
                            case 21:
                                if ((0x3ff000000000000L & l) != 0L)
                                    JjCheckNAdd(18);
                                break;
                            case 22:
                                if (curChar == 49)
                                    JjCheckNAddTwoStates(20, 21);
                                break;
                            case 23:
                                if (curChar == 34)
                                    JjCheckNAddStates(5, 7);
                                break;
                            case 24:
                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
                                    JjCheckNAddTwoStates(24, 25);
                                break;
                            case 25:
                                if (curChar == 34)
                                    jjstateSet[jjnewStateCnt++] = 26;
                                break;
                            case 26:
                                if (curChar == 42 && kind > 18)
                                    kind = 18;
                                break;
                            case 27:
                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
                                    JjCheckNAddStates(12, 14);
                                break;
                            case 29:
                                if (curChar == 34)
                                    JjCheckNAddStates(12, 14);
                                break;
                            case 30:
                                if (curChar == 34 && kind > 19)
                                    kind = 19;
                                break;
                            case 31:
                                if ((0x7bffe8faffffd9ffL & l) == 0L)
                                    break;
                                if (kind > 22)
                                    kind = 22;
                                JjCheckNAddStates(0, 4);
                                break;
                            case 32:
                                if ((0x7bffe8faffffd9ffL & l) != 0L)
                                    JjCheckNAddTwoStates(32, 33);
                                break;
                            case 33:
                                if (curChar == 42 && kind > 20)
                                    kind = 20;
                                break;
                            case 34:
                                if ((0x7bffe8faffffd9ffL & l) != 0L)
                                    JjCheckNAddTwoStates(34, 35);
                                break;
                            case 35:
                                if ((0x8000040000000000L & l) == (ulong)0L)
                                    break;
                                if (kind > 21)
                                    kind = 21;
                                JjCheckNAddTwoStates(35, 36);
                                break;
                            case 36:
                                if ((0xfbffecfaffffd9ffL & l) == (ulong)0L)
                                    break;
                                if (kind > 21)
                                    kind = 21;
                                JjCheckNAdd(36);
                                break;
                            case 37:
                                if ((0x7bffe8faffffd9ffL & l) == 0L)
                                    break;
                                if (kind > 22)
                                    kind = 22;
                                JjCheckNAdd(37);
                                break;
                            default: break;
                        }
                    } while (i != startsAt);
                }
                // Characters 64-127: mask on the low 6 bits of the character.
                else if (curChar < 128)
                {
                    // NOTE: See the note in the Classic.QueryParserTokenManager.cs file.
                    // I am working under the assumption 63 is the correct value, since it
                    // made the tests pass there.
                    ulong l = (ulong)(1L << (curChar & 63));
                    //long l = 1L << (curChar & 077);
                    do
                    {
                        switch (jjstateSet[--i])
                        {
                            case 0:
                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
                                {
                                    if (kind > 22)
                                        kind = 22;
                                    JjCheckNAddStates(0, 4);
                                }
                                if ((0x400000004000L & l) != 0L)
                                {
                                    if (kind > 12)
                                        kind = 12;
                                }
                                else if ((0x80000000800000L & l) != 0L)
                                {
                                    if (kind > 11)
                                        kind = 11;
                                }
                                else if (curChar == 97)
                                    jjstateSet[jjnewStateCnt++] = 9;
                                else if (curChar == 65)
                                    jjstateSet[jjnewStateCnt++] = 6;
                                else if (curChar == 111)
                                    jjstateSet[jjnewStateCnt++] = 3;
                                else if (curChar == 79)
                                    jjstateSet[jjnewStateCnt++] = 1;
                                if (curChar == 110)
                                    jjstateSet[jjnewStateCnt++] = 15;
                                else if (curChar == 78)
                                    jjstateSet[jjnewStateCnt++] = 12;
                                break;
                            case 1:
                                if (curChar == 82 && kind > 8)
                                    kind = 8;
                                break;
                            case 2:
                                if (curChar == 79)
                                    jjstateSet[jjnewStateCnt++] = 1;
                                break;
                            case 3:
                                if (curChar == 114 && kind > 8)
                                    kind = 8;
                                break;
                            case 4:
                                if (curChar == 111)
                                    jjstateSet[jjnewStateCnt++] = 3;
                                break;
                            case 5:
                                if (curChar == 68 && kind > 9)
                                    kind = 9;
                                break;
                            case 6:
                                if (curChar == 78)
                                    jjstateSet[jjnewStateCnt++] = 5;
                                break;
                            case 7:
                                if (curChar == 65)
                                    jjstateSet[jjnewStateCnt++] = 6;
                                break;
                            case 8:
                                if (curChar == 100 && kind > 9)
                                    kind = 9;
                                break;
                            case 9:
                                if (curChar == 110)
                                    jjstateSet[jjnewStateCnt++] = 8;
                                break;
                            case 10:
                                if (curChar == 97)
                                    jjstateSet[jjnewStateCnt++] = 9;
                                break;
                            case 11:
                                if (curChar == 84 && kind > 10)
                                    kind = 10;
                                break;
                            case 12:
                                if (curChar == 79)
                                    jjstateSet[jjnewStateCnt++] = 11;
                                break;
                            case 13:
                                if (curChar == 78)
                                    jjstateSet[jjnewStateCnt++] = 12;
                                break;
                            case 14:
                                if (curChar == 116 && kind > 10)
                                    kind = 10;
                                break;
                            case 15:
                                if (curChar == 111)
                                    jjstateSet[jjnewStateCnt++] = 14;
                                break;
                            case 16:
                                if (curChar == 110)
                                    jjstateSet[jjnewStateCnt++] = 15;
                                break;
                            case 17:
                                if ((0x80000000800000L & l) != 0L && kind > 11)
                                    kind = 11;
                                break;
                            case 18:
                                if ((0x400000004000L & l) != 0L && kind > 12)
                                    kind = 12;
                                break;
                            case 24:
                                JjAddStates(15, 16);
                                break;
                            case 27:
                                if ((0xffffffffefffffffL & l) != (ulong)0L)
                                    JjCheckNAddStates(12, 14);
                                break;
                            case 28:
                                if (curChar == 92)
                                    jjstateSet[jjnewStateCnt++] = 29;
                                break;
                            case 29:
                                if (curChar == 92)
                                    JjCheckNAddStates(12, 14);
                                break;
                            case 31:
                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
                                    break;
                                if (kind > 22)
                                    kind = 22;
                                JjCheckNAddStates(0, 4);
                                break;
                            case 32:
                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
                                    JjCheckNAddTwoStates(32, 33);
                                break;
                            case 34:
                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
                                    JjCheckNAddTwoStates(34, 35);
                                break;
                            case 36:
                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
                                    break;
                                if (kind > 21)
                                    kind = 21;
                                jjstateSet[jjnewStateCnt++] = 36;
                                break;
                            case 37:
                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
                                    break;
                                if (kind > 22)
                                    kind = 22;
                                JjCheckNAdd(37);
                                break;
                            default: break;
                        }
                    } while (i != startsAt);
                }
                // Characters above 0x7F: classify via the jjbitVec* tables.
                else
                {
                    int hiByte = (int)(curChar >> 8);
                    int i1 = hiByte >> 6;
                    //long l1 = 1L << (hiByte & 077);
                    ulong l1 = (ulong)(1L << (hiByte & 63));
                    int i2 = (curChar & 0xff) >> 6;
                    //long l2 = 1L << (curChar & 077);
                    ulong l2 = (ulong)(1L << (curChar & 63));
                    do
                    {
                        switch (jjstateSet[--i])
                        {
                            case 0:
                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
                                    break;
                                if (kind > 22)
                                    kind = 22;
                                JjCheckNAddStates(0, 4);
                                break;
                            case 24:
                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
                                    JjAddStates(15, 16);
                                break;
                            case 27:
                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
                                    JjAddStates(12, 14);
                                break;
                            case 32:
                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
                                    JjCheckNAddTwoStates(32, 33);
                                break;
                            case 34:
                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
                                    JjCheckNAddTwoStates(34, 35);
                                break;
                            case 36:
                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
                                    break;
                                if (kind > 21)
                                    kind = 21;
                                jjstateSet[jjnewStateCnt++] = 36;
                                break;
                            case 37:
                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
                                    break;
                                if (kind > 22)
                                    kind = 22;
                                JjCheckNAdd(37);
                                break;
                            default: break;
                        }
                    } while (i != startsAt);
                }
                if (kind != 0x7fffffff)
                {
                    // A match ended here; remember it and keep scanning for a longer one.
                    jjmatchedKind = kind;
                    jjmatchedPos = curPos;
                    kind = 0x7fffffff;
                }
                ++curPos;
                // Swap the double-buffered halves of jjstateSet; stop when no
                // active states remain for the next character.
                if ((i = jjnewStateCnt) == (startsAt = 38 - (jjnewStateCnt = startsAt)))
                    return curPos;
                try { curChar = input_stream.ReadChar(); }
                catch (System.IO.IOException e) { return curPos; } // end of input
            }
        }
+
        // Lexical state 0 ("Boost") has no string-literal tokens; go straight
        // to the NFA simulation.
        private int JjMoveStringLiteralDfa0_0()
        {
            return JjMoveNfa_0(0, 0);
        }
        /// <summary>
        /// Runs the generated NFA for lexical state 0 ("Boost"), which recognizes
        /// a number token (kind 23): one or more digits optionally followed by a
        /// '.' and further digits. Returns the number of characters read.
        /// </summary>
        private int JjMoveNfa_0(int startState, int curPos)
        {
            int startsAt = 0;
            jjnewStateCnt = 3;
            int i = 1;
            jjstateSet[0] = startState;
            int kind = 0x7fffffff;
            for (; ; )
            {
                if (++jjround == 0x7fffffff)
                    ReInitRounds();
                if (curChar < 64)
                {
                    long l = 1L << curChar;
                    do
                    {
                        switch (jjstateSet[--i])
                        {
                            case 0:
                                // 0x3ff000000000000L masks the digits '0'-'9'
                                if ((0x3ff000000000000L & l) == 0L)
                                    break;
                                if (kind > 23)
                                    kind = 23;
                                JjAddStates(17, 18);
                                break;
                            case 1:
                                if (curChar == 46)
                                    JjCheckNAdd(2);
                                break;
                            case 2:
                                if ((0x3ff000000000000L & l) == 0L)
                                    break;
                                if (kind > 23)
                                    kind = 23;
                                JjCheckNAdd(2);
                                break;
                            default: break;
                        }
                    } while (i != startsAt);
                }
                else if (curChar < 128)
                {
                    // No transitions exist for ASCII letters/symbols in this state.
                    //long l = 1L << (curChar & 077);
                    ulong l = (ulong)(1L << (curChar & 63)); 
                    do
                    {
                        switch (jjstateSet[--i])
                        {
                            default: break;
                        }
                    } while (i != startsAt);
                }
                else
                {
                    // No transitions exist for characters above 0x7F in this state.
                    int hiByte = (int)(curChar >> 8);
                    int i1 = hiByte >> 6;
                    //long l1 = 1L << (hiByte & 077);
                    ulong l1 = (ulong)(1L << (hiByte & 63));
                    int i2 = (curChar & 0xff) >> 6;
                    //long l2 = 1L << (curChar & 077);
                    ulong l2 = (ulong)(1L << (curChar & 63));
                    do
                    {
                        switch (jjstateSet[--i])
                        {
                            default: break;
                        }
                    } while (i != startsAt);
                }
                if (kind != 0x7fffffff)
                {
                    jjmatchedKind = kind;
                    jjmatchedPos = curPos;
                    kind = 0x7fffffff;
                }
                ++curPos;
                if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
                    return curPos;
                try { curChar = input_stream.ReadChar(); }
                catch (System.IO.IOException e) { return curPos; } // end of input
            }
        }
        // Flattened lists of target NFA states; JjAddStates/JjCheckNAddStates
        // index into this array with inclusive (start, end) pairs.
        internal static readonly int[] jjnextStates = {
            32, 33, 34, 35, 37, 24, 27, 28, 20, 17, 21, 18, 27, 28, 30, 24, 
            25, 0, 1, 
        };
+        private static bool JjCanMove_0(int hiByte, int i1, int i2, ulong l1, ulong l2)
+        {
+            switch (hiByte)
+            {
+                case 0:
+                    return ((jjbitVec2[i2] & l2) != 0L);
+                default:
+                    if ((jjbitVec0[i1] & l1) != 0L)
+                        return true;
+                    return false;
+            }
+        }
+
        /// <summary>
        /// Token literal values indexed by token kind; null where a kind has no
        /// fixed image. The Java original used octal escapes ("\50" etc.); C#
        /// has no octal escapes, so hex equivalents are used:
        /// '(' , ')' , ',' , ':' , '^' for kinds 13-17.
        /// </summary>
        // Original Java form (octal escapes) kept for reference:
        //public static readonly string[] jjstrLiteralImages = {
        //    "", null, null, null, null, null, null, null, null, null, null, null, null, 
        //    "\50", "\51", "\54", "\72", "\136", null, null, null, null, null, null 
        //};

        public static readonly string[] jjstrLiteralImages = {
            "", null, null, null, null, null, null, null, null, null, null, null, null, 
            "\x0028" /*"\50"*/, "\x0029" /*"\51"*/, "\x002C" /*"\54"*/, "\x003A" /*"\72"*/, "\x005E" /*"\136"*/, null, null, null, null, null, null 
        };

        /// <summary>Lexer state names, indexed by lexical state number.</summary>
        public static readonly string[] lexStateNames = {
           "Boost",
           "DEFAULT"
        };

        /// <summary>
        /// Lexical state to switch to after each token kind (-1 = no change):
        /// kind 17 ('^') enters "Boost" (0); kind 23 (a number) returns to "DEFAULT" (1).
        /// </summary>
        public static readonly int[] jjnewLexState = {
           -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1, -1, -1, -1, -1, 1, 
        };
        // Bit set of kinds that produce a Token (0xffff01 = kind 0 and kinds 8-23).
        internal static readonly long[] jjtoToken = {
           0xffff01L, 
        };
        // Bit set of kinds that are silently skipped (0x80 = kind 7, whitespace).
        internal static readonly long[] jjtoSkip = {
           0x80L, 
        };
        // Source of the characters being tokenized.
        protected ICharStream input_stream;
        // Per-state round markers; see JjCheckNAdd and ReInitRounds.
        private readonly uint[] jjrounds = new uint[38];
        // Double-buffered active NFA state set (two halves of 38 states each).
        private readonly int[] jjstateSet = new int[76];
        // The character currently being examined.
        protected internal char curChar;
+
        /// <summary>Constructs a token manager reading from <paramref name="stream"/>.</summary>
        public QueryParserTokenManager(ICharStream stream)
        {
            InitBlock();
            input_stream = stream;
        }

        /// <summary>Constructs a token manager starting in the given lexical state.</summary>
        public QueryParserTokenManager(ICharStream stream, int lexState)
            : this(stream)
        {
            SwitchTo(lexState);
        }
+
        /// <summary>
        /// Reinitialise the token manager to read from a new stream, resetting
        /// match bookkeeping, the lexical state, and the per-state rounds.
        /// </summary>
        public void ReInit(ICharStream stream)
        {
            jjmatchedPos = jjnewStateCnt = 0;
            curLexState = defaultLexState;
            input_stream = stream;
            ReInitRounds();
        }
+        private void ReInitRounds()
+        {
+            int i;
+            jjround = 0x80000001;
+            for (i = 38; i-- > 0; )
+                jjrounds[i] = 0x80000000;
+        }
+
        /// <summary>Reinitialise the token manager and switch to the given lexical state.</summary>
        public void ReInit(ICharStream stream, int lexState)
        {
            ReInit(stream);
            SwitchTo(lexState);
        }
+
+        /** Switch to specified lex state. */
+        public void SwitchTo(int lexState)
+        {
+            if (lexState >= 2 || lexState < 0)
+                throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
+            else
+                curLexState = lexState;
+        }
+
+        protected Token JjFillToken()
+        {
+            Token t;
+            string curTokenImage;
+            int beginLine;
+            int endLine;
+            int beginColumn;
+            int endColumn;
+            string im = jjstrLiteralImages[jjmatchedKind];
+            curTokenImage = (im == null) ? input_stream.Image : im;
+            beginLine = input_stream.BeginLine;
+            beginColumn = input_stream.BeginColumn;
+            endLine = input_stream.EndLine;
+            endColumn = input_stream.EndColumn;
+            t = Token.NewToken(jjmatchedKind, curTokenImage);
+
+            t.beginLine = beginLine;
+            t.endLine = endLine;
+            t.beginColumn = beginColumn;
+            t.endColumn = endColumn;
+
+            return t;
+        }
+
        internal int curLexState = 1;     // current lexical state (starts in "DEFAULT")
        internal int defaultLexState = 1; // state restored by ReInit(ICharStream)
        internal int jjnewStateCnt;       // fill pointer into jjstateSet
        internal uint jjround;            // current NFA round counter
        internal int jjmatchedPos;        // end position of the longest match so far
        internal int jjmatchedKind;       // kind of the longest match so far
+
+        /// <summary>Get the next Token.</summary>
+        [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
+        public Token GetNextToken()
+        {
+            Token matchedToken;
+            int curPos = 0;
+
+            for (; ; )
+            {
+                try
+                {
+                    curChar = input_stream.BeginToken();
+                }
+                catch (System.IO.IOException e)
+                {
+                    jjmatchedKind = 0;
+                    matchedToken = JjFillToken();
+                    return matchedToken;
+                }
+
+                switch (curLexState)
+                {
+                    case 0:
+                        jjmatchedKind = 0x7fffffff;
+                        jjmatchedPos = 0;
+                        curPos = JjMoveStringLiteralDfa0_0();
+                        break;
+                    case 1:
+                        jjmatchedKind = 0x7fffffff;
+                        jjmatchedPos = 0;
+                        curPos = jjMoveStringLiteralDfa0_1();
+                        break;
+                }
+                if (jjmatchedKind != 0x7fffffff)
+                {
+                    if (jjmatchedPos + 1 < curPos)
+                        input_stream.Backup(curPos - jjmatchedPos - 1);
+                    if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
+                    {
+                        matchedToken = JjFillToken();
+                        if (jjnewLexState[jjmatchedKind] != -1)
+                            curLexState = jjnewLexState[jjmatchedKind];
+                        return matchedToken;
+                    }
+                    else
+                    {
+                        if (jjnewLexState[jjmatchedKind] != -1)
+                            curLexState = jjnewLexState[jjmatchedKind];
+                        goto EOFLoop;
+                    }
+                }
+                int error_line = input_stream.EndLine;
+                int error_column = input_stream.EndColumn;
+                string error_after = null;
+                bool EOFSeen = false;
+                try { input_stream.ReadChar(); input_stream.Backup(1); }
+                catch (System.IO.IOException e1)
+                {
+                    EOFSeen = true;
+                    error_after = curPos <= 1 ? "" : input_stream.Image;
+                    if (curChar == '\n' || curChar == '\r')
+                    {
+                        error_line++;
+                        error_column = 0;
+                    }
+                    else
+                        error_column++;
+                }
+                if (!EOFSeen)
+                {
+                    input_stream.Backup(1);
+                    error_after = curPos <= 1 ? "" : input_stream.Image;
+                }
+                throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
+            EOFLoop: ;
+            }
+        }
+
+        private void JjCheckNAdd(int state)
+        {
+            if (jjrounds[state] != jjround)
+            {
+                jjstateSet[jjnewStateCnt++] = state;
+                jjrounds[state] = jjround;
+            }
+        }
+        private void JjAddStates(int start, int end)
+        {
+            do
+            {
+                jjstateSet[jjnewStateCnt++] = jjnextStates[start];
+            } while (start++ != end);
+        }
        // Conditionally adds both states for the current round (see JjCheckNAdd).
        private void JjCheckNAddTwoStates(int state1, int state2)
        {
            JjCheckNAdd(state1);
            JjCheckNAdd(state2);
        }
+
+        private void JjCheckNAddStates(int start, int end)
+        {
+            do
+            {
+                JjCheckNAdd(jjnextStates[start]);
+            } while (start++ != end);
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/Token.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/Token.cs b/Lucene.Net.QueryParser/Surround/Parser/Token.cs
new file mode 100644
index 0000000..2d9b83d
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/Token.cs
@@ -0,0 +1,142 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+	
+    /// <summary>
+    /// Describes a single token produced by the generated token manager.
+    /// </summary>
+    [Serializable]
+    public class Token
+    {
+        /// <summary>
+        /// An integer that describes the kind of this token. The numbering
+        /// system is determined by JavaCCParser, and a table of these numbers
+        /// is stored in the generated constants file.
+        /// </summary>
+        public int kind;
+
+        /// <summary>The line number of the first character of this token.</summary>
+        public int beginLine;
+
+        /// <summary>The column number of the first character of this token.</summary>
+        public int beginColumn;
+
+        /// <summary>The line number of the last character of this token.</summary>
+        public int endLine;
+
+        /// <summary>The column number of the last character of this token.</summary>
+        public int endColumn;
+
+        /// <summary>The string image of the token.</summary>
+        public string image;
+
+        /// <summary>
+        /// A reference to the next regular (non-special) token from the input
+        /// stream. Null when this is the last token read from the input stream,
+        /// or when the token manager has not read tokens beyond this one.
+        /// This is true only if this token is also a regular token; otherwise
+        /// see <see cref="specialToken"/> for the meaning of this field.
+        /// </summary>
+        public Token next;
+
+        /// <summary>
+        /// Accesses special tokens that occur prior to this token, but after
+        /// the immediately preceding regular (non-special) token. Null when no
+        /// such special tokens exist. When there are several, this refers to
+        /// the last of them, which in turn refers to the previous one through
+        /// its own specialToken field, and so on until the first special token
+        /// (whose specialToken field is null). The next fields of special
+        /// tokens chain forward to the special tokens that immediately follow
+        /// them (without an intervening regular token).
+        /// </summary>
+        public Token specialToken;
+
+        /// <summary>
+        /// An optional attribute value of the token; null by default.
+        /// Tokens that are not mere syntactic sugar will often carry a
+        /// meaningful value for later compiler/interpreter stages, usually
+        /// different from the image. Subclasses that want a non-null value
+        /// override this property.
+        /// </summary>
+        public virtual object Value
+        {
+            get { return null; }
+        }
+
+        /// <summary>Constructs an unfilled token.</summary>
+        public Token()
+        {
+        }
+
+        /// <summary>Constructs a token of the given kind with no image.</summary>
+        public Token(int kind)
+            : this(kind, null)
+        {
+        }
+
+        /// <summary>Constructs a token of the given kind with the given image.</summary>
+        public Token(int kind, string image)
+        {
+            this.kind = kind;
+            this.image = image;
+        }
+
+        /// <summary>Returns the token image.</summary>
+        public override string ToString()
+        {
+            return image;
+        }
+
+        /// <summary>
+        /// Factory hook: returns a plain <see cref="Token"/> by default.
+        /// To create subclass instances for particular kinds, add cases to the
+        /// switch (e.g. <c>case MyParserConstants.ID: return new IDToken(ofKind, image);</c>)
+        /// and cast the matched token to the appropriate type in lexical actions.
+        /// </summary>
+        public static Token NewToken(int ofKind, string image)
+        {
+            switch (ofKind)
+            {
+                default: return new Token(ofKind, image);
+            }
+        }
+
+        /// <summary>Returns a new token of the given kind with no image.</summary>
+        public static Token NewToken(int ofKind)
+        {
+            return NewToken(ofKind, null);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs b/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
new file mode 100644
index 0000000..2ccfc58
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
@@ -0,0 +1,170 @@
+\ufeffusing System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>Token Manager Error.</summary>
+    [Serializable]
+    public class TokenMgrError : Exception
+    {
+        // Ordinals for the various reasons an error of this type can be thrown.
+
+        /// <summary>Lexical error occurred.</summary>
+        internal const int LEXICAL_ERROR = 0;
+
+        /// <summary>An attempt was made to create a second instance of a static token manager.</summary>
+        internal const int STATIC_LEXER_ERROR = 1;
+
+        /// <summary>Tried to change to an invalid lexical state.</summary>
+        internal const int INVALID_LEXICAL_STATE = 2;
+
+        /// <summary>Detected (and bailed out of) an infinite loop in the token manager.</summary>
+        internal const int LOOP_DETECTED = 3;
+
+        /// <summary>
+        /// Indicates the reason why the exception is thrown; one of the four
+        /// ordinals above.
+        /// </summary>
+        internal int errorCode;
+
+        /// <summary>
+        /// Replaces unprintable characters by their escaped (or unicode
+        /// escaped) equivalents in the given string.
+        /// </summary>
+        protected internal static string AddEscapes(string str)
+        {
+            StringBuilder retval = new StringBuilder();
+            foreach (char ch in str)
+            {
+                switch (ch)
+                {
+                    case (char)0:
+                        // NUL characters are dropped entirely.
+                        continue;
+                    case '\b':
+                        retval.Append("\\b");
+                        continue;
+                    case '\t':
+                        retval.Append("\\t");
+                        continue;
+                    case '\n':
+                        retval.Append("\\n");
+                        continue;
+                    case '\f':
+                        retval.Append("\\f");
+                        continue;
+                    case '\r':
+                        retval.Append("\\r");
+                        continue;
+                    case '\"':
+                        retval.Append("\\\"");
+                        continue;
+                    case '\'':
+                        retval.Append("\\\'");
+                        continue;
+                    case '\\':
+                        retval.Append("\\\\");
+                        continue;
+                    default:
+                        if (ch < 0x20 || ch > 0x7e)
+                        {
+                            // Zero-pad to a four-digit \uXXXX escape.
+                            string hex = "0000" + Convert.ToString(ch, 16);
+                            retval.Append("\\u" + hex.Substring(hex.Length - 4));
+                        }
+                        else
+                        {
+                            retval.Append(ch);
+                        }
+                        continue;
+                }
+            }
+            return retval.ToString();
+        }
+
+        /// <summary>
+        /// Returns a detailed message for the error when it is thrown by the
+        /// token manager to indicate a lexical error.
+        /// </summary>
+        /// <remarks>You can customize the lexical error message by modifying this method.</remarks>
+        /// <param name="EOFSeen">indicates if EOF caused the lexical error</param>
+        /// <param name="lexState">lexical state in which this error occurred (currently unused here, kept for the JavaCC template signature)</param>
+        /// <param name="errorLine">line number when the error occurred</param>
+        /// <param name="errorColumn">column number when the error occurred</param>
+        /// <param name="errorAfter">prefix that was seen before this error occurred</param>
+        /// <param name="curChar">the offending character</param>
+        /// <returns>Detailed error message</returns>
+        protected internal static string LexicalError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar)
+        {
+            string encountered = EOFSeen
+                ? "<EOF> "
+                : ("\"" + AddEscapes(Convert.ToString(curChar)) + "\"") + " (" + (int)curChar + "), ";
+            return "Lexical error at line " + errorLine
+                + ", column " + errorColumn
+                + ".  Encountered: " + encountered
+                + "after : \"" + AddEscapes(errorAfter) + "\"";
+        }
+
+        /// <summary>
+        /// You can also modify the body of this property to customize your
+        /// error messages. For example, cases like LOOP_DETECTED and
+        /// INVALID_LEXICAL_STATE are not of end-user concern, so you could
+        /// return something like "Internal Error : Please file a bug report..."
+        /// for such cases in the release version of your parser.
+        /// </summary>
+        public override string Message
+        {
+            get { return base.Message; }
+        }
+
+        // Constructors of various flavors follow.
+
+        /// <summary>No-arg constructor.</summary>
+        public TokenMgrError()
+        {
+        }
+
+        /// <summary>Constructor with message and reason.</summary>
+        public TokenMgrError(string message, int reason)
+            : base(message)
+        {
+            errorCode = reason;
+        }
+
+        /// <summary>Full constructor; builds the message via <see cref="LexicalError"/>.</summary>
+        public TokenMgrError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar, int reason)
+            : this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason)
+        {
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs b/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
new file mode 100644
index 0000000..aa00e0d
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
@@ -0,0 +1,39 @@
+\ufeffusing Lucene.Net.Search;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory for conjunctions
+    /// </summary>
+    public class AndQuery : ComposedQuery
+    {
+        public AndQuery(IEnumerable<SrndQuery> queries, bool inf, string opName)
+            : base(queries, inf, opName)
+        {
+        }
+
+        /// <summary>
+        /// Builds a boolean query in which every subquery MUST match;
+        /// the subqueries can be individually boosted.
+        /// </summary>
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            var subQueries = MakeLuceneSubQueriesField(fieldName, qf);
+            return SrndBooleanQuery.MakeBooleanQuery(subQueries, BooleanClause.Occur.MUST);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs b/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
new file mode 100644
index 0000000..8992746
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
@@ -0,0 +1,110 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Search.Spans;
+using System.Runtime.CompilerServices;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+
+     // Create basic queries to be used during rewrite.
+     // The basic queries are TermQuery and SpanTermQuery.
+     // An exception can be thrown when too many of these are used.
+     // SpanTermQuery and TermQuery use IndexReader.termEnum(Term), which causes the buffer usage.
+     
+     // Use this class to limit the buffer usage for reading terms from an index.
+     // Default is 1024, the same as the max. number of subqueries for a BooleanQuery.
+
+
+
+    /// <summary>
+    /// Factory for creating the basic term queries (<see cref="TermQuery"/> and
+    /// <see cref="SpanTermQuery"/>) used during rewrite, while limiting how many
+    /// may be created. The default limit is 1024, the same as the maximum
+    /// number of subqueries for a BooleanQuery.
+    /// </summary>
+    public class BasicQueryFactory
+    {
+        // Fixed at construction time (was a mutable field; never reassigned).
+        private readonly int maxBasicQueries;
+        // Number of basic queries handed out so far; incremented under CheckMax's lock.
+        private int queriesMade;
+
+        public BasicQueryFactory(int maxBasicQueries)
+        {
+            this.maxBasicQueries = maxBasicQueries;
+            this.queriesMade = 0;
+        }
+
+        public BasicQueryFactory()
+            : this(1024)
+        {
+        }
+
+        // NOTE(review): read without synchronization; under concurrent use the
+        // value may lag CheckMax — confirm whether exactness matters to callers.
+        public int NrQueriesMade { get { return queriesMade; } }
+        public int MaxBasicQueries { get { return maxBasicQueries; } }
+
+        public override string ToString()
+        {
+            return GetType().Name
+                + "(maxBasicQueries: " + maxBasicQueries
+                + ", queriesMade: " + queriesMade
+                + ")";
+        }
+
+        private bool AtMax
+        {
+            get { return queriesMade >= maxBasicQueries; }
+        }
+
+        /// <summary>
+        /// Counts one more basic query; throws TooManyBasicQueries once the
+        /// configured maximum has been reached.
+        /// </summary>
+        [MethodImpl(MethodImplOptions.Synchronized)]
+        protected virtual void CheckMax()
+        {
+            if (AtMax)
+                throw new TooManyBasicQueries(MaxBasicQueries);
+            queriesMade++;
+        }
+
+        public TermQuery NewTermQuery(Term term)
+        {
+            CheckMax();
+            return new TermQuery(term);
+        }
+
+        public SpanTermQuery NewSpanTermQuery(Term term)
+        {
+            CheckMax();
+            return new SpanTermQuery(term);
+        }
+
+        // NOTE(review): the hash depends on the mutable AtMax state, so it can
+        // change over the object's lifetime (kept for parity with the original;
+        // unsafe to use instances as hash-table keys while queries are made).
+        public override int GetHashCode()
+        {
+            return GetType().GetHashCode() ^ (AtMax ? 7 : 31 * 32);
+        }
+
+        /// <summary>
+        /// Two BasicQueryFactory's are equal when they generate
+        /// the same types of basic queries, or both cannot generate queries anymore.
+        /// </summary>
+        public override bool Equals(object obj)
+        {
+            BasicQueryFactory other = obj as BasicQueryFactory;
+            return other != null && AtMax == other.AtMax;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs b/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
new file mode 100644
index 0000000..d421ad6
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/ComposedQuery.cs
@@ -0,0 +1,144 @@
+\ufeffusing System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Base class for composite queries (such as AND/OR/NOT)
+    /// </summary>
+    public abstract class ComposedQuery : SrndQuery
+    {
+        /// <summary>
+        /// NOTE(review): this constructor calls the virtual <see cref="Recompose"/>,
+        /// so subclass overrides of it run before the subclass constructor body —
+        /// kept as-is because subclasses may rely on it.
+        /// </summary>
+        public ComposedQuery(IEnumerable<SrndQuery> qs, bool operatorInfix, string opName)
+        {
+            Recompose(qs);
+            this.operatorInfix = operatorInfix;
+            this.opName = opName;
+        }
+
+        /// <summary>
+        /// Replaces the current subqueries; at least two are required.
+        /// </summary>
+        protected virtual void Recompose(IEnumerable<SrndQuery> queries)
+        {
+            // BUGFIX: materialize the sequence exactly once. The original
+            // called Count() and then copied into a List, enumerating a lazy
+            // IEnumerable twice (re-running any generator side effects).
+            List<SrndQuery> copy = new List<SrndQuery>(queries);
+            if (copy.Count < 2) throw new InvalidOperationException("Too few subqueries");
+            this.queries = copy;
+        }
+
+        // Operator name as written in the surround query, e.g. "AND".
+        protected string opName;
+        public virtual string OperatorName { get { return opName; } }
+
+        protected IList<SrndQuery> queries;
+
+        public virtual IEnumerator<SrndQuery> GetSubQueriesEnumerator()
+        {
+            return queries.GetEnumerator();
+        }
+
+        public virtual int NrSubQueries { get { return queries.Count; } }
+
+        public virtual SrndQuery GetSubQuery(int qn) { return queries[qn]; }
+
+        private bool operatorInfix;
+        public virtual bool IsOperatorInfix { get { return operatorInfix; } } /* else prefix operator */
+
+        /// <summary>
+        /// Maps each subquery to its Lucene query for the given field.
+        /// </summary>
+        public IEnumerable<Search.Query> MakeLuceneSubQueriesField(string fn, BasicQueryFactory qf)
+        {
+            List<Search.Query> luceneSubQueries = new List<Search.Query>();
+            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
+            while (sqi.MoveNext())
+            {
+                luceneSubQueries.Add((sqi.Current).MakeLuceneQueryField(fn, qf));
+            }
+            return luceneSubQueries;
+        }
+
+        public override string ToString()
+        {
+            StringBuilder r = new StringBuilder();
+            if (IsOperatorInfix)
+            {
+                InfixToString(r);
+            }
+            else
+            {
+                PrefixToString(r);
+            }
+            WeightToString(r);
+            return r.ToString();
+        }
+
+        // Override for different spacing
+        protected virtual string PrefixSeparator { get { return ", "; } }
+        protected virtual string BracketOpen { get { return "("; } }
+        protected virtual string BracketClose { get { return ")"; } }
+
+        /// <summary>
+        /// Writes "(a OP b OP c)" using the infix operator name.
+        /// </summary>
+        protected virtual void InfixToString(StringBuilder r)
+        {
+            /* Brackets are possibly redundant in the result. */
+            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
+            r.Append(BracketOpen);
+            if (sqi.MoveNext())
+            {
+                r.Append(sqi.Current.ToString());
+                while (sqi.MoveNext())
+                {
+                    r.Append(" ");
+                    r.Append(OperatorName); /* infix operator */
+                    r.Append(" ");
+                    r.Append(sqi.Current.ToString());
+                }
+            }
+            r.Append(BracketClose);
+        }
+
+        /// <summary>
+        /// Writes "OP(a, b, c)" using the prefix operator name.
+        /// </summary>
+        protected virtual void PrefixToString(StringBuilder r)
+        {
+            IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
+            r.Append(OperatorName); /* prefix operator */
+            r.Append(BracketOpen);
+            if (sqi.MoveNext())
+            {
+                r.Append(sqi.Current.ToString());
+                while (sqi.MoveNext())
+                {
+                    r.Append(PrefixSeparator);
+                    r.Append(sqi.Current.ToString());
+                }
+            }
+            r.Append(BracketClose);
+        }
+
+        /// <summary>
+        /// True when at least one subquery accepts a fields prefix.
+        /// </summary>
+        public override bool IsFieldsSubQueryAcceptable
+        {
+            get
+            {
+                /* at least one subquery should be acceptable */
+                IEnumerator<SrndQuery> sqi = GetSubQueriesEnumerator();
+                while (sqi.MoveNext())
+                {
+                    if ((sqi.Current).IsFieldsSubQueryAcceptable)
+                    {
+                        return true;
+                    }
+                }
+                return false;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs b/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
new file mode 100644
index 0000000..1ca7a01
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/DistanceQuery.cs
@@ -0,0 +1,117 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search.Spans;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory for NEAR queries 
+    /// </summary>
+    public class DistanceQuery : ComposedQuery, IDistanceSubQuery
+    {
+        /// <summary>
+        /// Creates a distance (NEAR) query over the given subqueries.
+        /// </summary>
+        /// <param name="queries">the subqueries; each must implement <see cref="IDistanceSubQuery"/></param>
+        /// <param name="infix">whether the operator was written infix</param>
+        /// <param name="opDistance">the distance indicated in the operator</param>
+        /// <param name="opName">the operator name</param>
+        /// <param name="ordered">whether subquery matches must occur in order</param>
+        public DistanceQuery(
+            IEnumerable<SrndQuery> queries,
+            bool infix,
+            int opDistance,
+            string opName,
+            bool ordered)
+            : base(queries, infix, opName)
+        {
+            this.opDistance = opDistance; /* the distance indicated in the operator */
+            this.ordered = ordered;
+        }
+
+        private int opDistance;
+        public virtual int OpDistance { get { return opDistance; } }
+
+        private bool ordered;
+        public virtual bool QueriesOrdered { get { return ordered; } }
+
+        /// <summary>
+        /// Returns null when all subqueries are acceptable inside a distance
+        /// query, otherwise a message naming the disallowed subquery.
+        /// </summary>
+        public virtual string DistanceSubQueryNotAllowed()
+        {
+            var sqi = GetSubQueriesEnumerator();
+            while (sqi.MoveNext())
+            {
+                var dsq = sqi.Current as IDistanceSubQuery;
+                if (dsq == null)
+                {
+                    // BUGFIX: the original dereferenced 'dsq' here, but in this
+                    // branch the cast has failed and dsq is null, so building the
+                    // message threw NullReferenceException. Report the offending
+                    // subquery itself instead.
+                    return "Operator " + OperatorName + " does not allow subquery " + sqi.Current.ToString();
+                }
+                string m = dsq.DistanceSubQueryNotAllowed();
+                if (m != null)
+                {
+                    return m;
+                }
+            }
+            return null; /* subqueries acceptable */
+        }
+
+        public virtual void AddSpanQueries(SpanNearClauseFactory sncf)
+        {
+            Search.Query snq = GetSpanNearQuery(sncf.IndexReader,
+                                  sncf.FieldName,
+                                  Weight,
+                                  sncf.BasicQueryFactory);
+            sncf.AddSpanQuery(snq);
+        }
+
+        /// <summary>
+        /// Builds a SpanNearQuery with slop OpDistance - 1 over the subqueries.
+        /// Returns SrndQuery.TheEmptyLcnQuery when any subquery produces no
+        /// span clauses, since a distance operator requires all subqueries.
+        /// </summary>
+        public Search.Query GetSpanNearQuery(
+            IndexReader reader,
+            string fieldName,
+            float boost,
+            BasicQueryFactory qf)
+        {
+            SpanQuery[] spanClauses = new SpanQuery[NrSubQueries];
+            var sqi = GetSubQueriesEnumerator();
+            int qi = 0;
+            while (sqi.MoveNext())
+            {
+                SpanNearClauseFactory sncf = new SpanNearClauseFactory(reader, fieldName, qf);
+
+                ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
+                if (sncf.Count == 0)
+                { /* distance operator requires all sub queries */
+                    while (sqi.MoveNext())
+                    { /* produce evt. error messages but ignore results */
+                        ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
+                        sncf.Clear();
+                    }
+                    return SrndQuery.TheEmptyLcnQuery;
+                }
+
+                spanClauses[qi] = sncf.MakeSpanClause();
+                qi++;
+            }
+            SpanNearQuery r = new SpanNearQuery(spanClauses, OpDistance - 1, QueriesOrdered);
+            r.Boost = boost;
+            return r;
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            return new DistanceRewriteQuery(this, fieldName, qf);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs b/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
new file mode 100644
index 0000000..3d3a108
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/DistanceRewriteQuery.cs
@@ -0,0 +1,35 @@
+\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Rewrite query wrapping a surround <see cref="DistanceQuery"/>; building
+    /// the span-near query is deferred until <see cref="Rewrite"/> is called
+    /// with an IndexReader.
+    /// </summary>
+    internal class DistanceRewriteQuery : RewriteQuery<DistanceQuery>
+    {
+        public DistanceRewriteQuery(
+            DistanceQuery srndQuery,
+            string fieldName,
+            BasicQueryFactory qf)
+            : base(srndQuery, fieldName, qf)
+        {
+        }
+
+        // Delegates to the wrapped DistanceQuery, passing this query's Boost.
+        public override Search.Query Rewrite(Index.IndexReader reader)
+        {
+            return srndQuery.GetSpanNearQuery(reader, fieldName, Boost, qf);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs b/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
new file mode 100644
index 0000000..639f9e0
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/DistanceSubQuery.cs
@@ -0,0 +1,36 @@
+\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Interface for queries that can be nested as subqueries
+    /// into a span near.
+    /// </summary>
+    public interface IDistanceSubQuery
+    {
+        /// <summary>
+        /// When <see cref="DistanceSubQueryNotAllowed()"/> returns non null, the reason why the subquery
+        /// is not allowed as a distance subquery is returned.
+        /// <br/>When <see cref="DistanceSubQueryNotAllowed()"/> returns null, <see cref="AddSpanQueries(SpanNearClauseFactory)"/> can be used
+        /// in the creation of the span near clause for the subquery.
+        /// </summary>
+        string DistanceSubQueryNotAllowed();
+
+        void AddSpanQueries(SpanNearClauseFactory sncf);
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs b/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
new file mode 100644
index 0000000..912bf36
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/FieldsQuery.cs
@@ -0,0 +1,105 @@
+\ufeffusing System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Forms an OR query of the provided query across multiple fields.
+    /// </summary>
+    public class FieldsQuery : SrndQuery /* mostly untested */
+    {
+        private SrndQuery q;
+        private IEnumerable<string> fieldNames;
+        private readonly char fieldOp;
+        private readonly string OrOperatorName = "OR"; /* for expanded queries, not normally visible */
+
+        public FieldsQuery(SrndQuery q, IEnumerable<string> fieldNames, char fieldOp)
+        {
+            this.q = q;
+            this.fieldNames = new List<string>(fieldNames);
+            this.fieldOp = fieldOp;
+        }
+
+        public FieldsQuery(SrndQuery q, string fieldName, char fieldOp)
+        {
+            this.q = q;
+            var fieldNameList = new List<string>();
+            fieldNameList.Add(fieldName);
+            this.fieldNames = fieldNameList;
+            this.fieldOp = fieldOp;
+        }
+
+        public override bool IsFieldsSubQueryAcceptable
+        {
+            get { return false; }
+        }
+
+        public Search.Query MakeLuceneQueryNoBoost(BasicQueryFactory qf)
+        {
+            if (fieldNames.Count() == 1)
+            { /* single field name: no new queries needed */
+                return q.MakeLuceneQueryFieldNoBoost(fieldNames.FirstOrDefault(), qf);
+            }
+            else
+            { /* OR query over the fields */
+                List<SrndQuery> queries = new List<SrndQuery>();
+                foreach (var fieldName in fieldNames)
+                {
+                    var qc = (SrndQuery)q.Clone();
+                    queries.Add(new FieldsQuery(qc, fieldName, fieldOp));
+                }
+                OrQuery oq = new OrQuery(queries,
+                                        true /* infix OR for field names */,
+                                        OrOperatorName);
+                // System.out.println(getClass().toString() + ", fields expanded: " + oq.toString()); /* needs testing */
+                return oq.MakeLuceneQueryField(null, qf);
+            }
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            return MakeLuceneQueryNoBoost(qf); /* use this.fieldNames instead of fieldName */
+        }
+
+        public virtual IEnumerable<string> FieldNames { get { return fieldNames; } }
+
+        public virtual char FieldOperator { get { return fieldOp; } }
+
+        public override string ToString()
+        {
+            StringBuilder r = new StringBuilder();
+            r.Append("(");
+            FieldNamesToString(r);
+            r.Append(q.ToString());
+            r.Append(")");
+            return r.ToString();
+        }
+
+        protected virtual void FieldNamesToString(StringBuilder r)
+        {
+            foreach (var fieldName in FieldNames)
+            {
+                r.Append(fieldName);
+                r.Append(FieldOperator);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs b/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
new file mode 100644
index 0000000..30d40a8
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/NotQuery.cs
@@ -0,0 +1,48 @@
+\ufeffusing Lucene.Net.Search;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory for prohibited clauses
+    /// </summary>
+    public class NotQuery : ComposedQuery
+    {
+        public NotQuery(IEnumerable<SrndQuery> queries, string opName)
+            : base(queries, true /* infix */, opName)
+        {
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            var luceneSubQueries = MakeLuceneSubQueriesField(fieldName, qf);
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(luceneSubQueries.FirstOrDefault(), BooleanClause.Occur.MUST);
+            SrndBooleanQuery.AddQueriesToBoolean(bq,
+                // FIXME: do not allow weights on prohibited subqueries.
+                    //luceneSubQueries.subList(1, luceneSubQueries.size()),
+                    luceneSubQueries.Skip(1).ToList(),
+                // later subqueries: not required, prohibited
+                    BooleanClause.Occur.MUST_NOT);
+            return bq;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs b/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
new file mode 100644
index 0000000..f7d0036
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/OrQuery.cs
@@ -0,0 +1,71 @@
+\ufeffusing Lucene.Net.Search;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory for disjunctions
+    /// </summary>
+    public class OrQuery : ComposedQuery, IDistanceSubQuery
+    {
+        public OrQuery(IEnumerable<SrndQuery> queries, bool infix, string opName)
+            : base(queries, infix, opName)
+        {
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            return SrndBooleanQuery.MakeBooleanQuery(
+                /* subqueries can be individually boosted */
+                MakeLuceneSubQueriesField(fieldName, qf), BooleanClause.Occur.SHOULD);
+        }
+
+        public virtual string DistanceSubQueryNotAllowed()
+        {
+            var sqi = GetSubQueriesEnumerator();
+            while (sqi.MoveNext())
+            {
+                SrndQuery leq = sqi.Current;
+                if (leq is IDistanceSubQuery)
+                {
+                    string m = ((IDistanceSubQuery)leq).DistanceSubQueryNotAllowed();
+                    if (m != null)
+                    {
+                        return m;
+                    }
+                }
+                else
+                {
+                    return "subquery not allowed: " + leq.ToString();
+                }
+            }
+            return null;
+        }
+
+        public virtual void AddSpanQueries(SpanNearClauseFactory sncf)
+        {
+            var sqi = GetSubQueriesEnumerator();
+            while (sqi.MoveNext())
+            {
+                ((IDistanceSubQuery)sqi.Current).AddSpanQueries(sncf);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs b/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
new file mode 100644
index 0000000..030923f
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/RewriteQuery.cs
@@ -0,0 +1,85 @@
+\ufeffusing Lucene.Net.Index;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public abstract class RewriteQuery<SQ> : Search.Query
+    {
+        protected readonly SQ srndQuery;
+        protected readonly string fieldName;
+        protected readonly BasicQueryFactory qf;
+
+        public RewriteQuery(
+            SQ srndQuery,
+            String fieldName,
+            BasicQueryFactory qf)
+        {
+            this.srndQuery = srndQuery;
+            this.fieldName = fieldName;
+            this.qf = qf;
+        }
+
+        public abstract override Search.Query Rewrite(IndexReader reader);
+
+        public override string ToString()
+        {
+            return ToString(null);
+        }
+
+        public override string ToString(string field)
+        {
+            return GetType().Name
+                + (field == null ? "" : "(unused: " + field + ")")
+                + "(" + fieldName
+                + ", " + srndQuery.ToString()
+                + ", " + qf.ToString()
+                + ")";
+        }
+
+        public override int GetHashCode()
+        {
+            return GetType().GetHashCode()
+                ^ fieldName.GetHashCode()
+                ^ qf.GetHashCode()
+                ^ srndQuery.GetHashCode();
+        }
+
+        public override bool Equals(object obj)
+        {
+            if (obj == null)
+                return false;
+            if (!GetType().Equals(obj.GetType()))
+                return false;
+            RewriteQuery<SQ> other = (RewriteQuery<SQ>)obj;
+            return fieldName.Equals(other.fieldName)
+                && qf.Equals(other.qf)
+                && srndQuery.Equals(other.srndQuery);
+        }
+
+        /// <summary>
+        /// Not supported by this query.
+        /// </summary>
+        /// <exception cref="NotSupportedException">Always thrown; cloning this query is not supported.</exception>
+        public override object Clone()
+        {
+            throw new NotSupportedException();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs b/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
new file mode 100644
index 0000000..5e39e03
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/SimpleTerm.cs
@@ -0,0 +1,118 @@
+\ufeffusing Lucene.Net.Index;
+using System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Base class for queries that expand to sets of simple terms.
+    /// </summary>
+    public abstract class SimpleTerm : SrndQuery, IDistanceSubQuery, IComparable<SimpleTerm>
+    {
+        public SimpleTerm(bool q) 
+        { 
+            quoted = q; 
+        }
+
+        private bool quoted;
+        internal bool IsQuoted { get { return quoted; } }
+
+        public virtual string Quote { get { return "\""; }}
+        public virtual string FieldOperator { get { return "/"; } }
+
+        public abstract string ToStringUnquoted();
+
+        [Obsolete("deprecated (March 2011) Not normally used, to be removed from Lucene 4.0. This class implementing Comparable is to be removed at the same time.")]
+        public int CompareTo(SimpleTerm ost)
+        {
+            /* for ordering terms and prefixes before using an index, not used */
+            return this.ToStringUnquoted().CompareTo(ost.ToStringUnquoted());
+        }
+
+        protected virtual void SuffixToString(StringBuilder r) { } /* override for prefix query */
+
+
+        public override string ToString()
+        {
+            StringBuilder r = new StringBuilder();
+            if (IsQuoted)
+            {
+                r.Append(Quote);
+            }
+            r.Append(ToStringUnquoted());
+            if (IsQuoted)
+            {
+                r.Append(Quote);
+            }
+            SuffixToString(r);
+            WeightToString(r);
+            return r.ToString();
+        }
+
+        public abstract void VisitMatchingTerms(
+                            IndexReader reader,
+                            string fieldName,
+                            IMatchingTermVisitor mtv);
+
+        /// <summary>
+        /// Callback to visit each matching term during "rewrite"
+        /// in <see cref="VisitMatchingTerm(Term)"/>.
+        /// </summary>
+        public interface IMatchingTermVisitor
+        {
+            void VisitMatchingTerm(Term t);
+        }
+
+        public string DistanceSubQueryNotAllowed()
+        {
+            return null;
+        }
+
+        public void AddSpanQueries(SpanNearClauseFactory sncf)
+        {
+            VisitMatchingTerms(
+                sncf.IndexReader,
+                sncf.FieldName,
+                new AddSpanQueriesMatchingTermVisitor(sncf, Weight));
+        }
+
+        internal class AddSpanQueriesMatchingTermVisitor : IMatchingTermVisitor
+        {
+            private readonly SpanNearClauseFactory sncf;
+            private readonly float weight;
+
+            public AddSpanQueriesMatchingTermVisitor(SpanNearClauseFactory sncf, float weight)
+            {
+                this.sncf = sncf;
+                this.weight = weight;
+            }
+
+            public void VisitMatchingTerm(Term term)
+            {
+                sncf.AddTermWeighted(term, weight);
+            }
+        }
+
+        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+        {
+            return new SimpleTermRewriteQuery(this, fieldName, qf);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
new file mode 100644
index 0000000..6502d6c
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/SimpleTermRewriteQuery.cs
@@ -0,0 +1,64 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    internal class SimpleTermRewriteQuery : RewriteQuery<SimpleTerm>
+    {
+        public  SimpleTermRewriteQuery(
+            SimpleTerm srndQuery,
+            string fieldName,
+            BasicQueryFactory qf)
+            : base(srndQuery, fieldName, qf)
+        {
+        }
+
+        public override Search.Query Rewrite(IndexReader reader)
+        {
+            var luceneSubQueries = new List<Search.Query>();
+            srndQuery.VisitMatchingTerms(reader, fieldName, 
+                new SimpleTermRewriteMatchingTermVisitor(luceneSubQueries, qf));
+            return (luceneSubQueries.Count == 0) ? SrndQuery.TheEmptyLcnQuery
+                : (luceneSubQueries.Count == 1) ? luceneSubQueries.First()
+                : SrndBooleanQuery.MakeBooleanQuery(
+                /* luceneSubQueries all have default weight */
+                luceneSubQueries, BooleanClause.Occur.SHOULD); /* OR the subquery terms */
+        }
+
+        internal class SimpleTermRewriteMatchingTermVisitor : SimpleTerm.IMatchingTermVisitor
+        {
+            private readonly IList<Search.Query> luceneSubQueries;
+            private readonly BasicQueryFactory qf;
+
+            public SimpleTermRewriteMatchingTermVisitor(IList<Search.Query> luceneSubQueries, BasicQueryFactory qf)
+            {
+                this.luceneSubQueries = luceneSubQueries;
+                this.qf = qf;
+            }
+
+            public void VisitMatchingTerm(Term term)
+            {
+                luceneSubQueries.Add(qf.NewTermQuery(term));
+            }
+        }
+    }
+}


[43/50] [abbrv] lucenenet git commit: Moved the TestToStringUtils class to the Lucene.Net.Core.Support namespace.

Posted by sy...@apache.org.
Moved the TestToStringUtils class to the Lucene.Net.Core.Support namespace.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/2f07fa26
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/2f07fa26
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/2f07fa26

Branch: refs/heads/master
Commit: 2f07fa26b5be1f41c9b4353488c413a007e6274b
Parents: 34284ee
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Aug 8 16:12:58 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:31:24 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Tests/Lucene.Net.Tests.csproj    |  2 +-
 .../core/Support/TestToStringUtils.cs           | 55 ++++++++++++++++++++
 .../core/Util/TestToStringUtils.cs              | 52 ------------------
 3 files changed, 56 insertions(+), 53 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2f07fa26/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj b/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
index 4755965..675d6e1 100644
--- a/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
+++ b/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
@@ -589,7 +589,7 @@
     <Compile Include="core\Util\TestTimSorter.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="core\Util\TestToStringUtils.cs" />
+    <Compile Include="core\Support\TestToStringUtils.cs" />
     <Compile Include="core\Util\TestUnicodeUtil.cs">
       <SubType>Code</SubType>
     </Compile>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2f07fa26/src/Lucene.Net.Tests/core/Support/TestToStringUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Support/TestToStringUtils.cs b/src/Lucene.Net.Tests/core/Support/TestToStringUtils.cs
new file mode 100644
index 0000000..e2f645b
--- /dev/null
+++ b/src/Lucene.Net.Tests/core/Support/TestToStringUtils.cs
@@ -0,0 +1,55 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+using System.Globalization;
+using System.Threading;
+
+namespace Lucene.Net.Core.Support
+{
+    /// <summary>
+    /// This test was added for .NET compatibility - LUCENENET specific
+    /// 
+    /// It tests the Lucene.Net.Util.ToStringUtils which was untested in the Java counterpart,
+    /// but required some help to ensure .NET compatibility.
+    /// </summary>
+    public class TestToStringUtils : LuceneTestCase
+    {
+        CultureInfo originalCulture;
+        public override void SetUp()
+        {
+            base.SetUp();
+            originalCulture = Thread.CurrentThread.CurrentCulture;
+        }
+
+        public override void TearDown()
+        {
+            Thread.CurrentThread.CurrentCulture = originalCulture;
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Check to ensure that the Boost function is properly converted in every possible culture.
+        /// </summary>
+        [Test]
+        public void TestBoost()
+        {
+            float boostNormal = 1f;
+            float boostFractional = 2.5f;
+            float boostNonFractional = 5f;
+            float boostLong = 1.111111111f;
+            float boostZeroNonFractional = 0f;
+            float boostZeroFractional = 0.123f;
+
+            foreach (CultureInfo culture in CultureInfo.GetCultures(CultureTypes.SpecificCultures | CultureTypes.NeutralCultures))
+            {
+                Thread.CurrentThread.CurrentCulture = culture;
+
+                assertEquals("", ToStringUtils.Boost(boostNormal));
+                assertEquals("^2.5", ToStringUtils.Boost(boostFractional));
+                assertEquals("^5.0", ToStringUtils.Boost(boostNonFractional));
+                assertEquals("^1.111111", ToStringUtils.Boost(boostLong));
+                assertEquals("^0.0", ToStringUtils.Boost(boostZeroNonFractional));
+                assertEquals("^0.123", ToStringUtils.Boost(boostZeroFractional));
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/2f07fa26/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs b/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
deleted file mode 100644
index 6839745..0000000
--- a/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
+++ /dev/null
@@ -1,52 +0,0 @@
-\ufeffusing Lucene.Net.Util;
-using NUnit.Framework;
-using System.Globalization;
-using System.Threading;
-
-namespace Lucene.Net.Core.Util
-{
-    /// <summary>
-    /// This test was added for .NET compatibility
-    /// </summary>
-    public class TestToStringUtils : LuceneTestCase
-    {
-        CultureInfo originalCulture;
-        public override void SetUp()
-        {
-            base.SetUp();
-            originalCulture = Thread.CurrentThread.CurrentCulture;
-        }
-
-        public override void TearDown()
-        {
-            Thread.CurrentThread.CurrentCulture = originalCulture;
-            base.TearDown();
-        }
-
-        /// <summary>
-        /// Check to ensure that the Boost function is properly converted in every possible culture.
-        /// </summary>
-        [Test]
-        public void TestBoost()
-        {
-            float boostNormal = 1f;
-            float boostFractional = 2.5f;
-            float boostNonFractional = 5f;
-            float boostLong = 1.111111111f;
-            float boostZeroNonFractional = 0f;
-            float boostZeroFractional = 0.123f;
-
-            foreach (CultureInfo culture in CultureInfo.GetCultures(CultureTypes.SpecificCultures | CultureTypes.NeutralCultures))
-            {
-                Thread.CurrentThread.CurrentCulture = culture;
-
-                assertEquals("", ToStringUtils.Boost(boostNormal));
-                assertEquals("^2.5", ToStringUtils.Boost(boostFractional));
-                assertEquals("^5.0", ToStringUtils.Boost(boostNonFractional));
-                assertEquals("^1.111111", ToStringUtils.Boost(boostLong));
-                assertEquals("^0.0", ToStringUtils.Boost(boostZeroNonFractional));
-                assertEquals("^0.123", ToStringUtils.Boost(boostZeroFractional));
-            }
-        }
-    }
-}


[32/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
deleted file mode 100644
index 49ef7d4..0000000
--- a/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
+++ /dev/null
@@ -1,912 +0,0 @@
-\ufeffusing Lucene.Net.QueryParser.Surround.Query;
-using System;
-using System.Collections.Generic;
-using System.IO;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// This class is generated by JavaCC.  The only method that clients should need
-    /// to call is {@link #parse parse()}.
-    ///
-
-    /// <p>This parser generates queries that make use of position information
-    ///   (Span queries). It provides positional operators (<code>w</code> and
-    ///   <code>n</code>) that accept a numeric distance, as well as boolean
-    ///   operators (<code>and</code>, <code>or</code>, and <code>not</code>,
-    ///   wildcards (<code>///</code> and <code>?</code>), quoting (with
-    ///   <code>"</code>), and boosting (via <code>^</code>).</p>
-
-    ///  <p>The operators (W, N, AND, OR, NOT) can be expressed lower-cased or
-    ///   upper-cased, and the non-unary operators (everything but NOT) support
-    ///   both infix <code>(a AND b AND c)</code> and prefix <code>AND(a, b,
-    ///   c)</code> notation. </p>
-
-    ///  <p>The W and N operators express a positional relationship among their
-    ///  operands.  N is ordered, and W is unordered.  The distance is 1 by
-    ///  default, meaning the operands are adjacent, or may be provided as a
-    ///  prefix from 2-99.  So, for example, 3W(a, b) means that terms a and b
-    ///  must appear within three positions of each other, or in other words, up
-    ///  to two terms may appear between a and b.  </p>
-    /// </summary>
-    public class QueryParser
-    {
-        internal readonly int minimumPrefixLength = 3;
-        internal readonly int minimumCharsInTrunc = 3;
-        internal readonly string truncationErrorMessage = "Too unrestrictive truncation: ";
-        internal readonly string boostErrorMessage = "Cannot handle boost value: ";
-
-        /* CHECKME: These should be the same as for the tokenizer. How? */
-        internal readonly char truncator = '*';
-        internal readonly char anyChar = '?';
-        internal readonly char quote = '"';
-        internal readonly char fieldOperator = ':';
-        internal readonly char comma = ','; /* prefix list separator */
-        internal readonly char carat = '^'; /* weight operator */
-
-        public static SrndQuery Parse(string query)
-        {
-            QueryParser parser = new QueryParser();
-            return parser.Parse2(query);
-        }
-
-        public QueryParser()
-            : this(new FastCharStream(new StringReader("")))
-        {
-        }
-
-        public virtual SrndQuery Parse2(string query)
-        {
-            ReInit(new FastCharStream(new StringReader(query)));
-            try
-            {
-                return TopSrndQuery();
-            }
-            catch (TokenMgrError tme)
-            {
-                throw new ParseException(tme.Message);
-            }
-        }
-
-        protected virtual SrndQuery GetFieldsQuery(
-            SrndQuery q, IEnumerable<string> fieldNames)
-        {
-            /* FIXME: check acceptable subquery: at least one subquery should not be
-             * a fields query.
-             */
-            return new FieldsQuery(q, fieldNames, fieldOperator);
-        }
-
-        protected virtual SrndQuery GetOrQuery(IEnumerable<SrndQuery> queries, bool infix, Token orToken)
-        {
-            return new OrQuery(queries, infix, orToken.image);
-        }
-
-        protected virtual SrndQuery GetAndQuery(IEnumerable<SrndQuery> queries, bool infix, Token andToken)
-        {
-            return new AndQuery(queries, infix, andToken.image);
-        }
-
-        protected virtual SrndQuery GetNotQuery(IEnumerable<SrndQuery> queries, Token notToken)
-        {
-            return new NotQuery(queries, notToken.image);
-        }
-
-        protected static int GetOpDistance(string distanceOp)
-        {
-            /* W, 2W, 3W etc -> 1, 2 3, etc. Same for N, 2N ... */
-            return distanceOp.Length == 1
-              ? 1
-              : int.Parse(distanceOp.Substring(0, distanceOp.Length - 1));
-        }
-
-        protected static void CheckDistanceSubQueries(DistanceQuery distq, string opName)
-        {
-            string m = distq.DistanceSubQueryNotAllowed();
-            if (m != null)
-            {
-                throw new ParseException("Operator " + opName + ": " + m);
-            }
-        }
-
-        protected virtual SrndQuery GetDistanceQuery(
-            IEnumerable<SrndQuery> queries,
-            bool infix,
-            Token dToken,
-            bool ordered)
-        {
-            DistanceQuery dq = new DistanceQuery(queries,
-                                                infix,
-                                                GetOpDistance(dToken.image),
-                                                dToken.image,
-                                                ordered);
-            CheckDistanceSubQueries(dq, dToken.image);
-            return dq;
-        }
-
-        protected virtual SrndQuery GetTermQuery(
-              String term, bool quoted)
-        {
-            return new SrndTermQuery(term, quoted);
-        }
-
-        protected virtual bool AllowedSuffix(String suffixed)
-        {
-            return (suffixed.Length - 1) >= minimumPrefixLength;
-        }
-
-        protected virtual SrndQuery GetPrefixQuery(
-            string prefix, bool quoted)
-        {
-            return new SrndPrefixQuery(prefix, quoted, truncator);
-        }
-
-        protected virtual bool AllowedTruncation(string truncated)
-        {
-            /* At least 3 normal characters needed. */
-            int nrNormalChars = 0;
-            for (int i = 0; i < truncated.Length; i++)
-            {
-                char c = truncated[i];
-                if ((c != truncator) && (c != anyChar))
-                {
-                    nrNormalChars++;
-                }
-            }
-            return nrNormalChars >= minimumCharsInTrunc;
-        }
-
-        protected virtual SrndQuery GetTruncQuery(string truncated)
-        {
-            return new SrndTruncQuery(truncated, truncator, anyChar);
-        }
-
-        public SrndQuery TopSrndQuery()
-        {
-            SrndQuery q;
-            q = FieldsQuery();
-            Jj_consume_token(0);
-            { if (true) return q; }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery FieldsQuery()
-        {
-            SrndQuery q;
-            IEnumerable<string> fieldNames;
-            fieldNames = OptionalFields();
-            q = OrQuery();
-            { if (true) return (fieldNames == null) ? q : GetFieldsQuery(q, fieldNames); }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public IEnumerable<string> OptionalFields()
-        {
-            Token fieldName;
-            IList<string> fieldNames = null;
-
-            while (true)
-            {
-                if (Jj_2_1(2))
-                {
-                    ;
-                }
-                else
-                {
-                    goto label_1;
-                }
-                // to the colon
-                fieldName = Jj_consume_token(RegexpToken.TERM);
-                Jj_consume_token(RegexpToken.COLON);
-                if (fieldNames == null)
-                {
-                    fieldNames = new List<string>();
-                }
-                fieldNames.Add(fieldName.image);
-            }
-        label_1:
-            { if (true) return fieldNames; }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery OrQuery()
-        {
-            SrndQuery q;
-            IList<SrndQuery> queries = null;
-            Token oprt = null;
-            q = AndQuery();
-
-            while (true)
-            {
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.OR:
-                        ;
-                        break;
-                    default:
-                        jj_la1[0] = jj_gen;
-                        goto label_2;
-                }
-                oprt = Jj_consume_token(RegexpToken.OR);
-                /* keep only last used operator */
-                if (queries == null)
-                {
-                    queries = new List<SrndQuery>();
-                    queries.Add(q);
-                }
-                q = AndQuery();
-                queries.Add(q);
-            }
-        label_2:
-            { if (true) return (queries == null) ? q : GetOrQuery(queries, true /* infix */, oprt); }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery AndQuery()
-        {
-            SrndQuery q;
-            IList<SrndQuery> queries = null;
-            Token oprt = null;
-            q = NotQuery();
-
-            while (true)
-            {
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.AND:
-                        ;
-                        break;
-                    default:
-                        jj_la1[1] = jj_gen;
-                        goto label_3;
-                }
-                oprt = Jj_consume_token(RegexpToken.AND);
-                /* keep only last used operator */
-                if (queries == null)
-                {
-                    queries = new List<SrndQuery>();
-                    queries.Add(q);
-                }
-                q = NotQuery();
-                queries.Add(q);
-            }
-        label_3:
-            { if (true) return (queries == null) ? q : GetAndQuery(queries, true /* infix */, oprt); }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery NotQuery()
-        {
-            SrndQuery q;
-            IList<SrndQuery> queries = null;
-            Token oprt = null;
-            q = NQuery();
-
-            while (true)
-            {
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.NOT:
-                        ;
-                        break;
-                    default:
-                        jj_la1[2] = jj_gen;
-                        goto label_4;
-                }
-                oprt = Jj_consume_token(RegexpToken.NOT);
-                /* keep only last used operator */
-                if (queries == null)
-                {
-                    queries = new List<SrndQuery>();
-                    queries.Add(q);
-                }
-                q = NQuery();
-                queries.Add(q);
-            }
-        label_4:
-            { if (true) return (queries == null) ? q : GetNotQuery(queries, oprt); }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery NQuery()
-        {
-            SrndQuery q;
-            IList<SrndQuery> queries;
-            Token dt;
-            q = WQuery();
-
-            while (true)
-            {
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.N:
-                        ;
-                        break;
-                    default:
-                        jj_la1[3] = jj_gen;
-                        goto label_5;
-                }
-                dt = Jj_consume_token(RegexpToken.N);
-                queries = new List<SrndQuery>();
-                queries.Add(q); /* left associative */
-
-                q = WQuery();
-                queries.Add(q);
-                q = GetDistanceQuery(queries, true /* infix */, dt, false /* not ordered */);
-            }
-        label_5:
-            { if (true) return q; }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery WQuery()
-        {
-            SrndQuery q;
-            IList<SrndQuery> queries;
-            Token wt;
-            q = PrimaryQuery();
-
-            while (true)
-            {
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.W:
-                        ;
-                        break;
-                    default:
-                        jj_la1[4] = jj_gen;
-                        goto label_6;
-                }
-                wt = Jj_consume_token(RegexpToken.W);
-                queries = new List<SrndQuery>();
-                queries.Add(q); /* left associative */
-
-                q = PrimaryQuery();
-                queries.Add(q);
-                q = GetDistanceQuery(queries, true /* infix */, wt, true /* ordered */);
-            }
-        label_6:
-            { if (true) return q; }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery PrimaryQuery()
-        {
-            /* bracketed weighted query or weighted term */
-            SrndQuery q;
-            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-            {
-                case RegexpToken.LPAREN:
-                    Jj_consume_token(RegexpToken.LPAREN);
-                    q = FieldsQuery();
-                    Jj_consume_token(RegexpToken.RPAREN);
-                    break;
-                case RegexpToken.OR:
-                case RegexpToken.AND:
-                case RegexpToken.W:
-                case RegexpToken.N:
-                    q = PrefixOperatorQuery();
-                    break;
-                case RegexpToken.TRUNCQUOTED:
-                case RegexpToken.QUOTED:
-                case RegexpToken.SUFFIXTERM:
-                case RegexpToken.TRUNCTERM:
-                case RegexpToken.TERM:
-                    q = SimpleTerm();
-                    break;
-                default:
-                    jj_la1[5] = jj_gen;
-                    Jj_consume_token(-1);
-                    throw new ParseException();
-            }
-            OptionalWeights(q);
-            { if (true) return q; }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery PrefixOperatorQuery()
-        {
-            Token oprt;
-            IEnumerable<SrndQuery> queries;
-            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-            {
-                case RegexpToken.OR:
-                    oprt = Jj_consume_token(RegexpToken.OR);
-                    /* prefix OR */
-                    queries = FieldsQueryList();
-                    { if (true) return GetOrQuery(queries, false /* not infix */, oprt); }
-                    break;
-                case RegexpToken.AND:
-                    oprt = Jj_consume_token(RegexpToken.AND);
-                    /* prefix AND */
-                    queries = FieldsQueryList();
-                    { if (true) return GetAndQuery(queries, false /* not infix */, oprt); }
-                    break;
-                case RegexpToken.N:
-                    oprt = Jj_consume_token(RegexpToken.N);
-                    /* prefix N */
-                    queries = FieldsQueryList();
-                    { if (true) return GetDistanceQuery(queries, false /* not infix */, oprt, false /* not ordered */); }
-                    break;
-                case RegexpToken.W:
-                    oprt = Jj_consume_token(RegexpToken.W);
-                    /* prefix W */
-                    queries = FieldsQueryList();
-                    { if (true) return GetDistanceQuery(queries, false  /* not infix */, oprt, true /* ordered */); }
-                    break;
-                default:
-                    jj_la1[6] = jj_gen;
-                    Jj_consume_token(-1);
-                    throw new ParseException();
-            }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public IEnumerable<SrndQuery> FieldsQueryList()
-        {
-            SrndQuery q;
-            IList<SrndQuery> queries = new List<SrndQuery>();
-            Jj_consume_token(RegexpToken.LPAREN);
-            q = FieldsQuery();
-            queries.Add(q);
-
-            while (true)
-            {
-                Jj_consume_token(RegexpToken.COMMA);
-                q = FieldsQuery();
-                queries.Add(q);
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.COMMA:
-                        ;
-                        break;
-                    default:
-                        jj_la1[7] = jj_gen;
-                        goto label_7;
-                }
-            }
-        label_7:
-            Jj_consume_token(RegexpToken.RPAREN);
-            { if (true) return queries; }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public SrndQuery SimpleTerm()
-        {
-            Token term;
-            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-            {
-                case RegexpToken.TERM:
-                    term = Jj_consume_token(RegexpToken.TERM);
-                    { if (true) return GetTermQuery(term.image, false /* not quoted */); }
-                    break;
-                case RegexpToken.QUOTED:
-                    term = Jj_consume_token(RegexpToken.QUOTED);
-                    // TODO: Substring fix
-                    { if (true) return GetTermQuery(term.image.Substring(1, (term.image.Length - 1) - 1), true /* quoted */); }
-                    break;
-                case RegexpToken.SUFFIXTERM:
-                    term = Jj_consume_token(RegexpToken.SUFFIXTERM);
-                    /* ending in * */
-                    if (!AllowedSuffix(term.image))
-                    {
-                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
-                    }
-                    // TODO: Substring fix
-                    { if (true) return GetPrefixQuery(term.image.Substring(0, term.image.Length - 1), false /* not quoted */); }
-                    break;
-                case RegexpToken.TRUNCTERM:
-                    term = Jj_consume_token(RegexpToken.TRUNCTERM);
-                    /* with at least one * or ? */
-                    if (!AllowedTruncation(term.image))
-                    {
-                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
-                    }
-                    { if (true) return GetTruncQuery(term.image); }
-                    break;
-                case RegexpToken.TRUNCQUOTED:
-                    term = Jj_consume_token(RegexpToken.TRUNCQUOTED);
-                    /* eg. "9b-b,m"* */
-                    if ((term.image.Length - 3) < minimumPrefixLength)
-                    {
-                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
-                    }
-                    // TODO: Substring fix
-                    { if (true) return GetPrefixQuery(term.image.Substring(1, (term.image.Length - 2) - 1), true /* quoted */); }
-                    break;
-                default:
-                    jj_la1[8] = jj_gen;
-                    Jj_consume_token(-1);
-                    throw new ParseException();
-            }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public void OptionalWeights(SrndQuery q)
-        {
-            Token weight = null;
-        
-            while (true)
-            {
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.CARAT:
-                        ;
-                        break;
-                    default:
-                        jj_la1[9] = jj_gen;
-                        goto label_8;
-                }
-                Jj_consume_token(RegexpToken.CARAT);
-                weight = Jj_consume_token(RegexpToken.NUMBER);
-                float f;
-                try
-                {
-                    // TODO: Test parsing float in various cultures (.NET)
-                    f = float.Parse(weight.image);
-                }
-                catch (Exception floatExc)
-                {
-                    { if (true) throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")"); }
-                }
-                if (f <= 0.0)
-                {
-                    { if (true) throw new ParseException(boostErrorMessage + weight.image); }
-                }
-                q.Weight = (f * q.Weight); /* left associative, fwiw */
-            }
-        label_8: ;
-        }
-
-        private bool Jj_2_1(int xla)
-        {
-            jj_la = xla; jj_lastpos = jj_scanpos = token;
-            try { return !Jj_3_1(); }
-            catch (LookaheadSuccess) { return true; }
-            finally { Jj_save(0, xla); }
-        }
-
-        private bool Jj_3_1()
-        {
-            if (Jj_scan_token(RegexpToken.TERM)) return true;
-            if (Jj_scan_token(RegexpToken.COLON)) return true;
-            return false;
-        }
-
-        /** Generated Token Manager. */
-        public QueryParserTokenManager token_source;
-        /** Current token. */
-        public Token token;
-        /** Next token. */
-        public Token jj_nt;
-        private int jj_ntk;
-        private Token jj_scanpos, jj_lastpos;
-        private int jj_la;
-        private int jj_gen;
-        private readonly int[] jj_la1 = new int[10];
-        private static int[] jj_la1_0;
-        static QueryParser()
-        {
-            Jj_la1_init_0();
-        }
-
-        private static void Jj_la1_init_0()
-        {
-            jj_la1_0 = new int[] { 0x100, 0x200, 0x400, 0x1000, 0x800, 0x7c3b00, 0x1b00, 0x8000, 0x7c0000, 0x20000, };
-        }
-        private readonly JJCalls[] jj_2_rtns = new JJCalls[1];
-        private bool jj_rescan = false;
-        private int jj_gc = 0;
-
-        /// <summary>
-        /// Constructor with user supplied CharStream.
-        /// </summary>
-        /// <param name="stream"></param>
-        public QueryParser(ICharStream stream)
-        {
-            token_source = new QueryParserTokenManager(stream);
-            token = new Token();
-            jj_ntk = -1;
-            jj_gen = 0;
-            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
-            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
-        }
-
-        /// <summary>
-        /// Reinitialise.
-        /// </summary>
-        /// <param name="stream"></param>
-        public virtual void ReInit(ICharStream stream)
-        {
-            token_source.ReInit(stream);
-            token = new Token();
-            jj_ntk = -1;
-            jj_gen = 0;
-            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
-            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
-        }
-
-        /// <summary>
-        /// Constructor with generated Token Manager.
-        /// </summary>
-        /// <param name="tm"></param>
-        public QueryParser(QueryParserTokenManager tm)
-        {
-            token_source = tm;
-            token = new Token();
-            jj_ntk = -1;
-            jj_gen = 0;
-            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
-            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
-        }
-
-        /// <summary>
-        /// Reinitialise.
-        /// </summary>
-        /// <param name="tm"></param>
-        public virtual void ReInit(QueryParserTokenManager tm)
-        {
-            token_source = tm;
-            token = new Token();
-            jj_ntk = -1;
-            jj_gen = 0;
-            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
-            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
-        }
-
-        private Token Jj_consume_token(int kind)
-        {
-            Token oldToken;
-            if ((oldToken = token).next != null) token = token.next;
-            else token = token.next = token_source.GetNextToken();
-            jj_ntk = -1;
-            if (token.kind == kind)
-            {
-                jj_gen++;
-                if (++jj_gc > 100)
-                {
-                    jj_gc = 0;
-                    for (int i = 0; i < jj_2_rtns.Length; i++)
-                    {
-                        JJCalls c = jj_2_rtns[i];
-                        while (c != null)
-                        {
-                            if (c.gen < jj_gen) c.first = null;
-                            c = c.next;
-                        }
-                    }
-                }
-                return token;
-            }
-            token = oldToken;
-            jj_kind = kind;
-            throw GenerateParseException();
-        }
-
-        private sealed class LookaheadSuccess : Exception { }
-        private readonly LookaheadSuccess jj_ls = new LookaheadSuccess();
-
-        private bool Jj_scan_token(int kind)
-        {
-            if (jj_scanpos == jj_lastpos)
-            {
-                jj_la--;
-                if (jj_scanpos.next == null)
-                {
-                    jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
-                }
-                else
-                {
-                    jj_lastpos = jj_scanpos = jj_scanpos.next;
-                }
-            }
-            else
-            {
-                jj_scanpos = jj_scanpos.next;
-            }
-            if (jj_rescan)
-            {
-                int i = 0; Token tok = token;
-                while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
-                if (tok != null) Jj_add_error_token(kind, i);
-            }
-            if (jj_scanpos.kind != kind) return true;
-            if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
-            return false;
-        }
-
-        /// <summary>
-        /// Get the next Token.
-        /// </summary>
-        /// <returns></returns>
-        public Token GetNextToken()
-        {
-            if (token.next != null) token = token.next;
-            else token = token.next = token_source.GetNextToken();
-            jj_ntk = -1;
-            jj_gen++;
-            return token;
-        }
-
-        /// <summary>
-        /// Get the specific Token.
-        /// </summary>
-        /// <param name="index"></param>
-        /// <returns></returns>
-        public Token GetToken(int index)
-        {
-            Token t = token;
-            for (int i = 0; i < index; i++)
-            {
-                if (t.next != null) t = t.next;
-                else t = t.next = token_source.GetNextToken();
-            }
-            return t;
-        }
-
-        private int Jj_ntk()
-        {
-            if ((jj_nt = token.next) == null)
-                return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
-            else
-                return (jj_ntk = jj_nt.kind);
-        }
-
-        private IList<int[]> jj_expentries = new List<int[]>();
-        private int[] jj_expentry;
-        private int jj_kind = -1;
-        private int[] jj_lasttokens = new int[100];
-        private int jj_endpos;
-
-        private void Jj_add_error_token(int kind, int pos)
-        {
-            if (pos >= 100) return;
-            if (pos == jj_endpos + 1)
-            {
-                jj_lasttokens[jj_endpos++] = kind;
-            }
-            else if (jj_endpos != 0)
-            {
-                jj_expentry = new int[jj_endpos];
-                for (int i = 0; i < jj_endpos; i++)
-                {
-                    jj_expentry[i] = jj_lasttokens[i];
-                }
-                foreach (var oldentry in jj_expentries)
-                {
-                    if (oldentry.Length == jj_expentry.Length)
-                    {
-                        for (int i = 0; i < jj_expentry.Length; i++)
-                        {
-                            if (oldentry[i] != jj_expentry[i])
-                            {
-                                continue;
-                            }
-                        }
-                        jj_expentries.Add(jj_expentry);
-                        break;
-                    }
-                }
-                if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
-            }
-        }
-
-        /// <summary>
-        /// Generate ParseException.
-        /// </summary>
-        /// <returns></returns>
-        public virtual ParseException GenerateParseException()
-        {
-            jj_expentries.Clear();
-            bool[] la1tokens = new bool[24];
-            if (jj_kind >= 0)
-            {
-                la1tokens[jj_kind] = true;
-                jj_kind = -1;
-            }
-            for (int i = 0; i < 10; i++)
-            {
-                if (jj_la1[i] == jj_gen)
-                {
-                    for (int j = 0; j < 32; j++)
-                    {
-                        if ((jj_la1_0[i] & (1 << j)) != 0)
-                        {
-                            la1tokens[j] = true;
-                        }
-                    }
-                }
-            }
-            for (int i = 0; i < 24; i++)
-            {
-                if (la1tokens[i])
-                {
-                    jj_expentry = new int[1];
-                    jj_expentry[0] = i;
-                    jj_expentries.Add(jj_expentry);
-                }
-            }
-            jj_endpos = 0;
-            Jj_rescan_token();
-            Jj_add_error_token(0, 0);
-            int[][] exptokseq = new int[jj_expentries.Count][];
-            for (int i = 0; i < jj_expentries.Count; i++)
-            {
-                exptokseq[i] = jj_expentries[i];
-            }
-            return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
-        }
-
-        /// <summary>Enable tracing. </summary>
-        public void Enable_tracing()
-        {
-        }
-
-        /// <summary>Disable tracing. </summary>
-        public void Disable_tracing()
-        {
-        }
-
-        private void Jj_rescan_token()
-        {
-            jj_rescan = true;
-            for (int i = 0; i < 1; i++)
-            {
-                try
-                {
-                    JJCalls p = jj_2_rtns[i];
-                    do
-                    {
-                        if (p.gen > jj_gen)
-                        {
-                            jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
-                            switch (i)
-                            {
-                                case 0: Jj_3_1(); break;
-                            }
-                        }
-                        p = p.next;
-                    } while (p != null);
-                }
-                catch (LookaheadSuccess ls) { }
-            }
-            jj_rescan = false;
-        }
-
-        private void Jj_save(int index, int xla)
-        {
-            JJCalls p = jj_2_rtns[index];
-            while (p.gen > jj_gen)
-            {
-                if (p.next == null) { p = p.next = new JJCalls(); break; }
-                p = p.next;
-            }
-            p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
-        }
-
-        internal sealed class JJCalls
-        {
-            internal int gen;
-            internal Token first;
-            internal int arg;
-            internal JJCalls next;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs b/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
deleted file mode 100644
index 262f76b..0000000
--- a/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
+++ /dev/null
@@ -1,120 +0,0 @@
-\ufeffusing System;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public static class RegexpToken
-    {
-        /// <summary>End of File. </summary>
-        public const int EOF = 0;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _NUM_CHAR = 1;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _TERM_CHAR = 2;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _WHITESPACE = 3;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _STAR = 4;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _ONE_CHAR = 5;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _DISTOP_NUM = 6;
-        /// <summary>RegularExpression Id. </summary>
-        public const int OR = 8;
-        /// <summary>RegularExpression Id. </summary>
-        public const int AND = 9;
-        /// <summary>RegularExpression Id. </summary>
-        public const int NOT = 10;
-        /// <summary>RegularExpression Id. </summary>
-        public const int W = 11;
-        /// <summary>RegularExpression Id. </summary>
-        public const int N = 12;
-        /// <summary>RegularExpression Id. </summary>
-        public const int LPAREN = 13;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RPAREN = 14;
-        /// <summary>RegularExpression Id. </summary>
-        public const int COMMA = 15;
-        /// <summary>RegularExpression Id. </summary>
-        public const int COLON = 16;
-        /// <summary>RegularExpression Id. </summary>
-        public const int CARAT = 17;
-        /// <summary>RegularExpression Id. </summary>
-        public const int TRUNCQUOTED = 18;
-        /// <summary>RegularExpression Id. </summary>
-        public const int QUOTED = 19;
-        /// <summary>RegularExpression Id. </summary>
-        public const int SUFFIXTERM = 20;
-        /// <summary>RegularExpression Id. </summary>
-        public const int TRUNCTERM = 21;
-        /// <summary>RegularExpression Id. </summary>
-        public const int TERM = 22;
-        /// <summary>RegularExpression Id. </summary>
-        public const int NUMBER = 23;
-    }
-
-    public static class LexicalToken
-    {
-        /// <summary>Lexical state.</summary>
-        public const int Boost = 0;
-        /// <summary>Lexical state.</summary>
-        public const int DEFAULT = 2;
-    }
-
-    // NOTE: In Java, this was an interface. However, in 
-    // .NET we cannot define constants in an interface.
-    // So, instead we are making it a static class so it 
-    // can be shared between classes with different base classes.
-
-    // public interface QueryParserConstants
-
-    /// <summary> Token literal values and constants.
-    /// Generated by org.javacc.parser.OtherFilesGen#start()
-    /// </summary>
-    public static class QueryParserConstants
-    {
-        /// <summary>Literal token values. </summary>
-        public static string[] TokenImage = new string[] {
-            "<EOF>",
-            "<_NUM_CHAR>",
-            "<_TERM_CHAR>",
-            "<_WHITESPACE>",
-            "\"*\"",
-            "\"?\"",
-            "<_DISTOP_NUM>",
-            "<token of kind 7>",
-            "<OR>",
-            "<AND>",
-            "<NOT>",
-            "<W>",
-            "<N>",
-            "\"(\"",
-            "\")\"",
-            "\",\"",
-            "\":\"",
-            "\"^\"",
-            "<TRUNCQUOTED>",
-            "<QUOTED>",
-            "<SUFFIXTERM>",
-            "<TRUNCTERM>",
-            "<TERM>",
-            "<NUMBER>"
-        };
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
deleted file mode 100644
index ac3d611..0000000
--- a/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
+++ /dev/null
@@ -1,760 +0,0 @@
-\ufeffusing System;
-using System.Diagnostics.CodeAnalysis;
-using System.IO;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Token Manager.
-    /// </summary>
-    public class QueryParserTokenManager //: QueryParserConstants
-    {
-        private void InitBlock()
-        {
-            StreamWriter temp_writer;
-            temp_writer = new StreamWriter(Console.OpenStandardOutput(), Console.Out.Encoding);
-            temp_writer.AutoFlush = true;
-            debugStream = temp_writer;
-        }
-
-        /// <summary>Debug output. </summary>
-        public StreamWriter debugStream;
-        /// <summary>Set debug output. </summary>
-        public virtual void SetDebugStream(StreamWriter ds)
-        {
-            debugStream = ds;
-        }
-        private int JjStopStringLiteralDfa_1(int pos, long active0)
-        {
-            switch (pos)
-            {
-                default:
-                    return -1;
-            }
-        }
-        private int JjStartNfa_1(int pos, long active0)
-        {
-            return JjMoveNfa_1(JjStopStringLiteralDfa_1(pos, active0), pos + 1);
-        }
-        private int JjStopAtPos(int pos, int kind)
-        {
-            jjmatchedKind = kind;
-            jjmatchedPos = pos;
-            return pos + 1;
-        }
-        private int jjMoveStringLiteralDfa0_1()
-        {
-            switch (curChar)
-            {
-                case (char)40:
-                    return JjStopAtPos(0, 13);
-                case (char)41:
-                    return JjStopAtPos(0, 14);
-                case (char)44:
-                    return JjStopAtPos(0, 15);
-                case (char)58:
-                    return JjStopAtPos(0, 16);
-                case (char)94:
-                    return JjStopAtPos(0, 17);
-                default:
-                    return JjMoveNfa_1(0, 0);
-            }
-        }
-        internal static readonly ulong[] jjbitVec0 = {
-            0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
-        };
-        internal static readonly ulong[] jjbitVec2 = {
-            0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
-        };
-        private int JjMoveNfa_1(int startState, int curPos)
-        {
-            int startsAt = 0;
-            jjnewStateCnt = 38;
-            int i = 1;
-            jjstateSet[0] = startState;
-            int kind = 0x7fffffff;
-            for (; ; )
-            {
-                if (++jjround == 0x7fffffff)
-                    ReInitRounds();
-                if (curChar < 64)
-                {
-                    ulong l = (ulong)(1L << (int)curChar);
-                    do
-                    {
-                        switch (jjstateSet[--i])
-                        {
-                            case 0:
-                                if ((0x7bffe8faffffd9ffL & l) != 0L)
-                                {
-                                    if (kind > 22)
-                                        kind = 22;
-                                    JjCheckNAddStates(0, 4);
-                                }
-                                else if ((0x100002600L & l) != 0L)
-                                {
-                                    if (kind > 7)
-                                        kind = 7;
-                                }
-                                else if (curChar == 34)
-                                    JjCheckNAddStates(5, 7);
-                                if ((0x3fc000000000000L & l) != 0L)
-                                    JjCheckNAddStates(8, 11);
-                                else if (curChar == 49)
-                                    JjCheckNAddTwoStates(20, 21);
-                                break;
-                            case 19:
-                                if ((0x3fc000000000000L & l) != 0L)
-                                    JjCheckNAddStates(8, 11);
-                                break;
-                            case 20:
-                                if ((0x3ff000000000000L & l) != 0L)
-                                    JjCheckNAdd(17);
-                                break;
-                            case 21:
-                                if ((0x3ff000000000000L & l) != 0L)
-                                    JjCheckNAdd(18);
-                                break;
-                            case 22:
-                                if (curChar == 49)
-                                    JjCheckNAddTwoStates(20, 21);
-                                break;
-                            case 23:
-                                if (curChar == 34)
-                                    JjCheckNAddStates(5, 7);
-                                break;
-                            case 24:
-                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
-                                    JjCheckNAddTwoStates(24, 25);
-                                break;
-                            case 25:
-                                if (curChar == 34)
-                                    jjstateSet[jjnewStateCnt++] = 26;
-                                break;
-                            case 26:
-                                if (curChar == 42 && kind > 18)
-                                    kind = 18;
-                                break;
-                            case 27:
-                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
-                                    JjCheckNAddStates(12, 14);
-                                break;
-                            case 29:
-                                if (curChar == 34)
-                                    JjCheckNAddStates(12, 14);
-                                break;
-                            case 30:
-                                if (curChar == 34 && kind > 19)
-                                    kind = 19;
-                                break;
-                            case 31:
-                                if ((0x7bffe8faffffd9ffL & l) == 0L)
-                                    break;
-                                if (kind > 22)
-                                    kind = 22;
-                                JjCheckNAddStates(0, 4);
-                                break;
-                            case 32:
-                                if ((0x7bffe8faffffd9ffL & l) != 0L)
-                                    JjCheckNAddTwoStates(32, 33);
-                                break;
-                            case 33:
-                                if (curChar == 42 && kind > 20)
-                                    kind = 20;
-                                break;
-                            case 34:
-                                if ((0x7bffe8faffffd9ffL & l) != 0L)
-                                    JjCheckNAddTwoStates(34, 35);
-                                break;
-                            case 35:
-                                if ((0x8000040000000000L & l) == (ulong)0L)
-                                    break;
-                                if (kind > 21)
-                                    kind = 21;
-                                JjCheckNAddTwoStates(35, 36);
-                                break;
-                            case 36:
-                                if ((0xfbffecfaffffd9ffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 21)
-                                    kind = 21;
-                                JjCheckNAdd(36);
-                                break;
-                            case 37:
-                                if ((0x7bffe8faffffd9ffL & l) == 0L)
-                                    break;
-                                if (kind > 22)
-                                    kind = 22;
-                                JjCheckNAdd(37);
-                                break;
-                            default: break;
-                        }
-                    } while (i != startsAt);
-                }
-                else if (curChar < 128)
-                {
-                    // NOTE: See the note in the Classic.QueryParserTokenManager.cs file.
-                    // I am working under the assumption 63 is the correct value, since it
-                    // made the tests pass there.
-                    ulong l = (ulong)(1L << (curChar & 63));
-                    //long l = 1L << (curChar & 077);
-                    do
-                    {
-                        switch (jjstateSet[--i])
-                        {
-                            case 0:
-                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
-                                {
-                                    if (kind > 22)
-                                        kind = 22;
-                                    JjCheckNAddStates(0, 4);
-                                }
-                                if ((0x400000004000L & l) != 0L)
-                                {
-                                    if (kind > 12)
-                                        kind = 12;
-                                }
-                                else if ((0x80000000800000L & l) != 0L)
-                                {
-                                    if (kind > 11)
-                                        kind = 11;
-                                }
-                                else if (curChar == 97)
-                                    jjstateSet[jjnewStateCnt++] = 9;
-                                else if (curChar == 65)
-                                    jjstateSet[jjnewStateCnt++] = 6;
-                                else if (curChar == 111)
-                                    jjstateSet[jjnewStateCnt++] = 3;
-                                else if (curChar == 79)
-                                    jjstateSet[jjnewStateCnt++] = 1;
-                                if (curChar == 110)
-                                    jjstateSet[jjnewStateCnt++] = 15;
-                                else if (curChar == 78)
-                                    jjstateSet[jjnewStateCnt++] = 12;
-                                break;
-                            case 1:
-                                if (curChar == 82 && kind > 8)
-                                    kind = 8;
-                                break;
-                            case 2:
-                                if (curChar == 79)
-                                    jjstateSet[jjnewStateCnt++] = 1;
-                                break;
-                            case 3:
-                                if (curChar == 114 && kind > 8)
-                                    kind = 8;
-                                break;
-                            case 4:
-                                if (curChar == 111)
-                                    jjstateSet[jjnewStateCnt++] = 3;
-                                break;
-                            case 5:
-                                if (curChar == 68 && kind > 9)
-                                    kind = 9;
-                                break;
-                            case 6:
-                                if (curChar == 78)
-                                    jjstateSet[jjnewStateCnt++] = 5;
-                                break;
-                            case 7:
-                                if (curChar == 65)
-                                    jjstateSet[jjnewStateCnt++] = 6;
-                                break;
-                            case 8:
-                                if (curChar == 100 && kind > 9)
-                                    kind = 9;
-                                break;
-                            case 9:
-                                if (curChar == 110)
-                                    jjstateSet[jjnewStateCnt++] = 8;
-                                break;
-                            case 10:
-                                if (curChar == 97)
-                                    jjstateSet[jjnewStateCnt++] = 9;
-                                break;
-                            case 11:
-                                if (curChar == 84 && kind > 10)
-                                    kind = 10;
-                                break;
-                            case 12:
-                                if (curChar == 79)
-                                    jjstateSet[jjnewStateCnt++] = 11;
-                                break;
-                            case 13:
-                                if (curChar == 78)
-                                    jjstateSet[jjnewStateCnt++] = 12;
-                                break;
-                            case 14:
-                                if (curChar == 116 && kind > 10)
-                                    kind = 10;
-                                break;
-                            case 15:
-                                if (curChar == 111)
-                                    jjstateSet[jjnewStateCnt++] = 14;
-                                break;
-                            case 16:
-                                if (curChar == 110)
-                                    jjstateSet[jjnewStateCnt++] = 15;
-                                break;
-                            case 17:
-                                if ((0x80000000800000L & l) != 0L && kind > 11)
-                                    kind = 11;
-                                break;
-                            case 18:
-                                if ((0x400000004000L & l) != 0L && kind > 12)
-                                    kind = 12;
-                                break;
-                            case 24:
-                                JjAddStates(15, 16);
-                                break;
-                            case 27:
-                                if ((0xffffffffefffffffL & l) != (ulong)0L)
-                                    JjCheckNAddStates(12, 14);
-                                break;
-                            case 28:
-                                if (curChar == 92)
-                                    jjstateSet[jjnewStateCnt++] = 29;
-                                break;
-                            case 29:
-                                if (curChar == 92)
-                                    JjCheckNAddStates(12, 14);
-                                break;
-                            case 31:
-                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 22)
-                                    kind = 22;
-                                JjCheckNAddStates(0, 4);
-                                break;
-                            case 32:
-                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
-                                    JjCheckNAddTwoStates(32, 33);
-                                break;
-                            case 34:
-                                if ((0xffffffffbfffffffL & l) != (ulong)0L)
-                                    JjCheckNAddTwoStates(34, 35);
-                                break;
-                            case 36:
-                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 21)
-                                    kind = 21;
-                                jjstateSet[jjnewStateCnt++] = 36;
-                                break;
-                            case 37:
-                                if ((0xffffffffbfffffffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 22)
-                                    kind = 22;
-                                JjCheckNAdd(37);
-                                break;
-                            default: break;
-                        }
-                    } while (i != startsAt);
-                }
-                else
-                {
-                    int hiByte = (int)(curChar >> 8);
-                    int i1 = hiByte >> 6;
-                    //long l1 = 1L << (hiByte & 077);
-                    ulong l1 = (ulong)(1L << (hiByte & 63));
-                    int i2 = (curChar & 0xff) >> 6;
-                    //long l2 = 1L << (curChar & 077);
-                    ulong l2 = (ulong)(1L << (curChar & 63));
-                    do
-                    {
-                        switch (jjstateSet[--i])
-                        {
-                            case 0:
-                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
-                                    break;
-                                if (kind > 22)
-                                    kind = 22;
-                                JjCheckNAddStates(0, 4);
-                                break;
-                            case 24:
-                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
-                                    JjAddStates(15, 16);
-                                break;
-                            case 27:
-                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
-                                    JjAddStates(12, 14);
-                                break;
-                            case 32:
-                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
-                                    JjCheckNAddTwoStates(32, 33);
-                                break;
-                            case 34:
-                                if (JjCanMove_0(hiByte, i1, i2, l1, l2))
-                                    JjCheckNAddTwoStates(34, 35);
-                                break;
-                            case 36:
-                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
-                                    break;
-                                if (kind > 21)
-                                    kind = 21;
-                                jjstateSet[jjnewStateCnt++] = 36;
-                                break;
-                            case 37:
-                                if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
-                                    break;
-                                if (kind > 22)
-                                    kind = 22;
-                                JjCheckNAdd(37);
-                                break;
-                            default: break;
-                        }
-                    } while (i != startsAt);
-                }
-                if (kind != 0x7fffffff)
-                {
-                    jjmatchedKind = kind;
-                    jjmatchedPos = curPos;
-                    kind = 0x7fffffff;
-                }
-                ++curPos;
-                if ((i = jjnewStateCnt) == (startsAt = 38 - (jjnewStateCnt = startsAt)))
-                    return curPos;
-                try { curChar = input_stream.ReadChar(); }
-                catch (System.IO.IOException e) { return curPos; }
-            }
-        }
-
-        private int JjMoveStringLiteralDfa0_0()
-        {
-            return JjMoveNfa_0(0, 0);
-        }
-        private int JjMoveNfa_0(int startState, int curPos)
-        {
-            int startsAt = 0;
-            jjnewStateCnt = 3;
-            int i = 1;
-            jjstateSet[0] = startState;
-            int kind = 0x7fffffff;
-            for (; ; )
-            {
-                if (++jjround == 0x7fffffff)
-                    ReInitRounds();
-                if (curChar < 64)
-                {
-                    long l = 1L << curChar;
-                    do
-                    {
-                        switch (jjstateSet[--i])
-                        {
-                            case 0:
-                                if ((0x3ff000000000000L & l) == 0L)
-                                    break;
-                                if (kind > 23)
-                                    kind = 23;
-                                JjAddStates(17, 18);
-                                break;
-                            case 1:
-                                if (curChar == 46)
-                                    JjCheckNAdd(2);
-                                break;
-                            case 2:
-                                if ((0x3ff000000000000L & l) == 0L)
-                                    break;
-                                if (kind > 23)
-                                    kind = 23;
-                                JjCheckNAdd(2);
-                                break;
-                            default: break;
-                        }
-                    } while (i != startsAt);
-                }
-                else if (curChar < 128)
-                {
-                    //long l = 1L << (curChar & 077);
-                    ulong l = (ulong)(1L << (curChar & 63)); 
-                    do
-                    {
-                        switch (jjstateSet[--i])
-                        {
-                            default: break;
-                        }
-                    } while (i != startsAt);
-                }
-                else
-                {
-                    int hiByte = (int)(curChar >> 8);
-                    int i1 = hiByte >> 6;
-                    //long l1 = 1L << (hiByte & 077);
-                    ulong l1 = (ulong)(1L << (hiByte & 63));
-                    int i2 = (curChar & 0xff) >> 6;
-                    //long l2 = 1L << (curChar & 077);
-                    ulong l2 = (ulong)(1L << (curChar & 63));
-                    do
-                    {
-                        switch (jjstateSet[--i])
-                        {
-                            default: break;
-                        }
-                    } while (i != startsAt);
-                }
-                if (kind != 0x7fffffff)
-                {
-                    jjmatchedKind = kind;
-                    jjmatchedPos = curPos;
-                    kind = 0x7fffffff;
-                }
-                ++curPos;
-                if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
-                    return curPos;
-                try { curChar = input_stream.ReadChar(); }
-                catch (System.IO.IOException e) { return curPos; }
-            }
-        }
-        internal static readonly int[] jjnextStates = {
-            32, 33, 34, 35, 37, 24, 27, 28, 20, 17, 21, 18, 27, 28, 30, 24, 
-            25, 0, 1, 
-        };
-        private static bool JjCanMove_0(int hiByte, int i1, int i2, ulong l1, ulong l2)
-        {
-            switch (hiByte)
-            {
-                case 0:
-                    return ((jjbitVec2[i2] & l2) != 0L);
-                default:
-                    if ((jjbitVec0[i1] & l1) != 0L)
-                        return true;
-                    return false;
-            }
-        }
-
-        /** Token literal values. */
-        //public static readonly string[] jjstrLiteralImages = {
-        //    "", null, null, null, null, null, null, null, null, null, null, null, null, 
-        //    "\50", "\51", "\54", "\72", "\136", null, null, null, null, null, null 
-        //};
-
-        public static readonly string[] jjstrLiteralImages = {
-            "", null, null, null, null, null, null, null, null, null, null, null, null, 
-            "\x0028" /*"\50"*/, "\x0029" /*"\51"*/, "\x002C" /*"\54"*/, "\x003A" /*"\72"*/, "\x005E" /*"\136"*/, null, null, null, null, null, null 
-        };
-
-        /** Lexer state names. */
-        public static readonly string[] lexStateNames = {
-           "Boost",
-           "DEFAULT"
-        };
-
-        /** Lex State array. */
-        public static readonly int[] jjnewLexState = {
-           -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1, -1, -1, -1, -1, 1, 
-        };
-        internal static readonly long[] jjtoToken = {
-           0xffff01L, 
-        };
-        internal static readonly long[] jjtoSkip = {
-           0x80L, 
-        };
-        protected ICharStream input_stream;
-        private readonly uint[] jjrounds = new uint[38];
-        private readonly int[] jjstateSet = new int[76];
-        protected internal char curChar;
-
-        /** Constructor. */
-        public QueryParserTokenManager(ICharStream stream)
-        {
-            InitBlock();
-            input_stream = stream;
-        }
-
-        /** Constructor. */
-        public QueryParserTokenManager(ICharStream stream, int lexState)
-            : this(stream)
-        {
-            SwitchTo(lexState);
-        }
-
-        /** Reinitialise parser. */
-        public void ReInit(ICharStream stream)
-        {
-            jjmatchedPos = jjnewStateCnt = 0;
-            curLexState = defaultLexState;
-            input_stream = stream;
-            ReInitRounds();
-        }
-        private void ReInitRounds()
-        {
-            int i;
-            jjround = 0x80000001;
-            for (i = 38; i-- > 0; )
-                jjrounds[i] = 0x80000000;
-        }
-
-        /** Reinitialise parser. */
-        public void ReInit(ICharStream stream, int lexState)
-        {
-            ReInit(stream);
-            SwitchTo(lexState);
-        }
-
-        /** Switch to specified lex state. */
-        public void SwitchTo(int lexState)
-        {
-            if (lexState >= 2 || lexState < 0)
-                throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
-            else
-                curLexState = lexState;
-        }
-
-        protected Token JjFillToken()
-        {
-            Token t;
-            string curTokenImage;
-            int beginLine;
-            int endLine;
-            int beginColumn;
-            int endColumn;
-            string im = jjstrLiteralImages[jjmatchedKind];
-            curTokenImage = (im == null) ? input_stream.Image : im;
-            beginLine = input_stream.BeginLine;
-            beginColumn = input_stream.BeginColumn;
-            endLine = input_stream.EndLine;
-            endColumn = input_stream.EndColumn;
-            t = Token.NewToken(jjmatchedKind, curTokenImage);
-
-            t.beginLine = beginLine;
-            t.endLine = endLine;
-            t.beginColumn = beginColumn;
-            t.endColumn = endColumn;
-
-            return t;
-        }
-
-        internal int curLexState = 1;
-        internal int defaultLexState = 1;
-        internal int jjnewStateCnt;
-        internal uint jjround;
-        internal int jjmatchedPos;
-        internal int jjmatchedKind;
-
-        /// <summary>Get the next Token.</summary>
-        [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
-        public Token GetNextToken()
-        {
-            Token matchedToken;
-            int curPos = 0;
-
-            for (; ; )
-            {
-                try
-                {
-                    curChar = input_stream.BeginToken();
-                }
-                catch (System.IO.IOException e)
-                {
-                    jjmatchedKind = 0;
-                    matchedToken = JjFillToken();
-                    return matchedToken;
-                }
-
-                switch (curLexState)
-                {
-                    case 0:
-                        jjmatchedKind = 0x7fffffff;
-                        jjmatchedPos = 0;
-                        curPos = JjMoveStringLiteralDfa0_0();
-                        break;
-                    case 1:
-                        jjmatchedKind = 0x7fffffff;
-                        jjmatchedPos = 0;
-                        curPos = jjMoveStringLiteralDfa0_1();
-                        break;
-                }
-                if (jjmatchedKind != 0x7fffffff)
-                {
-                    if (jjmatchedPos + 1 < curPos)
-                        input_stream.Backup(curPos - jjmatchedPos - 1);
-                    if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
-                    {
-                        matchedToken = JjFillToken();
-                        if (jjnewLexState[jjmatchedKind] != -1)
-                            curLexState = jjnewLexState[jjmatchedKind];
-                        return matchedToken;
-                    }
-                    else
-                    {
-                        if (jjnewLexState[jjmatchedKind] != -1)
-                            curLexState = jjnewLexState[jjmatchedKind];
-                        goto EOFLoop;
-                    }
-                }
-                int error_line = input_stream.EndLine;
-                int error_column = input_stream.EndColumn;
-                string error_after = null;
-                bool EOFSeen = false;
-                try { input_stream.ReadChar(); input_stream.Backup(1); }
-                catch (System.IO.IOException e1)
-                {
-                    EOFSeen = true;
-                    error_after = curPos <= 1 ? "" : input_stream.Image;
-                    if (curChar == '\n' || curChar == '\r')
-                    {
-                        error_line++;
-                        error_column = 0;
-                    }
-                    else
-                        error_column++;
-                }
-                if (!EOFSeen)
-                {
-                    input_stream.Backup(1);
-                    error_after = curPos <= 1 ? "" : input_stream.Image;
-                }
-                throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
-            EOFLoop: ;
-            }
-        }
-
-        private void JjCheckNAdd(int state)
-        {
-            if (jjrounds[state] != jjround)
-            {
-                jjstateSet[jjnewStateCnt++] = state;
-                jjrounds[state] = jjround;
-            }
-        }
-        private void JjAddStates(int start, int end)
-        {
-            do
-            {
-                jjstateSet[jjnewStateCnt++] = jjnextStates[start];
-            } while (start++ != end);
-        }
-        private void JjCheckNAddTwoStates(int state1, int state2)
-        {
-            JjCheckNAdd(state1);
-            JjCheckNAdd(state2);
-        }
-
-        private void JjCheckNAddStates(int start, int end)
-        {
-            do
-            {
-                JjCheckNAdd(jjnextStates[start]);
-            } while (start++ != end);
-        }
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Parser/Token.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/Token.cs b/Lucene.Net.QueryParser/Surround/Parser/Token.cs
deleted file mode 100644
index 2d9b83d..0000000
--- a/Lucene.Net.QueryParser/Surround/Parser/Token.cs
+++ /dev/null
@@ -1,142 +0,0 @@
-\ufeffusing System;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-	
-	/// <summary> 
-    /// Describes the input token stream.
-    /// </summary>
-    [Serializable]
-	public class Token
-	{
-		
-		/// <summary> 
-        /// An integer that describes the kind of this token.  This numbering
-		/// system is determined by JavaCCParser, and a table of these numbers is
-		/// stored in the file ...Constants.java.
-		/// </summary>
-		public int kind;
-		
-		/// <summary>The line number of the first character of this Token. </summary>
-		public int beginLine;
-		/// <summary>The column number of the first character of this Token. </summary>
-		public int beginColumn;
-		/// <summary>The line number of the last character of this Token. </summary>
-		public int endLine;
-		/// <summary>The column number of the last character of this Token. </summary>
-		public int endColumn;
-		
-		/// <summary>The string image of the token.</summary>
-		public string image;
-		
-		/// <summary> 
-        /// A reference to the next regular (non-special) token from the input
-		/// stream.  If this is the last token from the input stream, or if the
-		/// token manager has not read tokens beyond this one, this field is
-		/// set to null.  This is true only if this token is also a regular
-		/// token.  Otherwise, see below for a description of the contents of
-		/// this field.
-		/// </summary>
-		public Token next;
-		
-		/// <summary> 
-        /// This field is used to access special tokens that occur prior to this
-		/// token, but after the immediately preceding regular (non-special) token.
-		/// If there are no such special tokens, this field is set to null.
-		/// When there are more than one such special token, this field refers
-		/// to the last of these special tokens, which in turn refers to the next
-		/// previous special token through its specialToken field, and so on
-		/// until the first special token (whose specialToken field is null).
-		/// The next fields of special tokens refer to other special tokens that
-		/// immediately follow it (without an intervening regular token).  If there
-		/// is no such token, this field is null.
-		/// </summary>
-		public Token specialToken;
-
-	    /// <summary> 
-        /// An optional attribute value of the Token.
-	    /// Tokens which are not used as syntactic sugar will often contain
-	    /// meaningful values that will be used later on by the compiler or
-	    /// interpreter. This attribute value is often different from the image.
-	    /// Any subclass of Token that actually wants to return a non-null value can
-	    /// override this method as appropriate.
-	    /// </summary>
-	    public virtual object Value
-	    {
-	        get { return null; }
-	    }
-
-	    /// <summary> 
-        /// No-argument constructor
-        /// </summary>
-		public Token()
-		{
-		}
-		
-		/// <summary> 
-        /// Constructs a new token for the specified Image.
-        /// </summary>
-		public Token(int kind)
-            : this(kind, null)
-		{
-		}
-		
-		/// <summary> 
-        /// Constructs a new token for the specified Image and Kind.
-        /// </summary>
-		public Token(int kind, string image)
-		{
-			this.kind = kind;
-			this.image = image;
-		}
-		
-		/// <summary> 
-        /// Returns the image.
-        /// </summary>
-		public override string ToString()
-		{
-			return image;
-		}
-		
-		/// <summary> 
-        /// Returns a new Token object, by default. However, if you want, you
-		/// can create and return subclass objects based on the value of ofKind.
-		/// Simply add the cases to the switch for all those special cases.
-		/// For example, if you have a subclass of Token called IDToken that
-		/// you want to create if ofKind is ID, simply add something like :
-		/// 
-		/// case MyParserConstants.ID : return new IDToken(ofKind, image);
-		/// 
-		/// to the following switch statement. Then you can cast matchedToken
-		/// variable to the appropriate type and use sit in your lexical actions.
-		/// </summary>
-		public static Token NewToken(int ofKind, string image)
-		{
-			switch (ofKind)
-			{
-				default:  return new Token(ofKind, image);
-			}
-		}
-		
-		public static Token NewToken(int ofKind)
-		{
-			return NewToken(ofKind, null);
-		}
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs b/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
deleted file mode 100644
index 2ccfc58..0000000
--- a/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
+++ /dev/null
@@ -1,170 +0,0 @@
-\ufeffusing System;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Surround.Parser
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-	/// <summary>Token Manager Error. </summary>
-	[Serializable]
-	public class TokenMgrError : Exception
-	{
-        /*
-		* Ordinals for various reasons why an Error of this type can be thrown.
-		*/
-
-        /// <summary> Lexical error occurred.</summary>
-        internal const int LEXICAL_ERROR = 0;
-
-        /// <summary> An attempt was made to create a second instance of a static token manager.</summary>
-        internal const int STATIC_LEXER_ERROR = 1;
-
-        /// <summary> Tried to change to an invalid lexical state.</summary>
-        internal const int INVALID_LEXICAL_STATE = 2;
-
-        /// <summary> Detected (and bailed out of) an infinite loop in the token manager.</summary>
-        internal const int LOOP_DETECTED = 3;
-
-        /// <summary> Indicates the reason why the exception is thrown. It will have
-        /// one of the above 4 values.
-        /// </summary>
-        internal int errorCode;
-
-        /// <summary> 
-        /// Replaces unprintable characters by their escaped (or unicode escaped)
-        /// equivalents in the given string
-        /// </summary>
-        protected internal static string AddEscapes(string str)
-        {
-            StringBuilder retval = new StringBuilder();
-            char ch;
-            for (int i = 0; i < str.Length; i++)
-            {
-                switch (str[i])
-                {
-
-                    case (char)(0):
-                        continue;
-
-                    case '\b':
-                        retval.Append("\\b");
-                        continue;
-
-                    case '\t':
-                        retval.Append("\\t");
-                        continue;
-
-                    case '\n':
-                        retval.Append("\\n");
-                        continue;
-
-                    case '\f':
-                        retval.Append("\\f");
-                        continue;
-
-                    case '\r':
-                        retval.Append("\\r");
-                        continue;
-
-                    case '\"':
-                        retval.Append("\\\"");
-                        continue;
-
-                    case '\'':
-                        retval.Append("\\\'");
-                        continue;
-
-                    case '\\':
-                        retval.Append("\\\\");
-                        continue;
-
-                    default:
-                        if ((ch = str[i]) < 0x20 || ch > 0x7e)
-                        {
-                            string s = "0000" + Convert.ToString(ch, 16);
-                            retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
-                        }
-                        else
-                        {
-                            retval.Append(ch);
-                        }
-                        continue;
-
-                }
-            }
-            return retval.ToString();
-        }
-
-        /// <summary>
-        /// Returns a detailed message for the Error when it is thrown by the
-        /// token manager to indicate a lexical error.
-        /// </summary>
-        /// <remarks>You can customize the lexical error message by modifying this method.</remarks>
-        /// <param name="EOFSeen">indicates if EOF caused the lexical error</param>
-        /// <param name="lexState">lexical state in which this error occurred</param>
-        /// <param name="errorLine">line number when the error occurred</param>
-        /// <param name="errorColumn">column number when the error occurred</param>
-        /// <param name="errorAfter">prefix that was seen before this error occurred</param>
-        /// <param name="curChar">the offending character</param>
-        /// <returns>Detailed error message</returns>
-        protected internal static string LexicalError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar)
-        {
-            return ("Lexical error at line " +
-                errorLine + ", column " +
-                errorColumn + ".  Encountered: " +
-                (EOFSeen ? "<EOF> " : ("\"" + AddEscapes(Convert.ToString(curChar)) + "\"") + " (" + (int)curChar + "), ") +
-                "after : \"" + AddEscapes(errorAfter) + "\"");
-        }
-
-		/// <summary> 
-        /// You can also modify the body of this method to customize your error messages.
-		/// For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
-		/// of end-users concern, so you can return something like :
-		/// 
-		/// "Internal Error : Please file a bug report .... "
-		/// 
-		/// from this method for such cases in the release version of your parser.
-		/// </summary>
-		public override string Message
-		{
-			get { return base.Message; }
-		}
-		
-		/*
-		* Constructors of various flavors follow.
-		*/
-		
-		/// <summary>No arg constructor. </summary>
-		public TokenMgrError()
-		{
-		}
-		
-		/// <summary>Constructor with message and reason. </summary>
-		public TokenMgrError(string message, int reason)
-            : base(message)
-		{
-			errorCode = reason;
-		}
-		
-		/// <summary>Full Constructor. </summary>
-		public TokenMgrError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar, int reason)
-            : this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason)
-		{
-		}
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs b/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
deleted file mode 100644
index aa00e0d..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
+++ /dev/null
@@ -1,39 +0,0 @@
-\ufeffusing Lucene.Net.Search;
-using System.Collections.Generic;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Factory for conjunctions
-    /// </summary>
-    public class AndQuery : ComposedQuery
-    {
-        public AndQuery(IEnumerable<SrndQuery> queries, bool inf, string opName)
-            : base(queries, inf, opName)
-        {
-        }
-
-        public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
-        {
-            return SrndBooleanQuery.MakeBooleanQuery( /* subqueries can be individually boosted */
-              MakeLuceneSubQueriesField(fieldName, qf), BooleanClause.Occur.MUST);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs b/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
deleted file mode 100644
index 8992746..0000000
--- a/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
+++ /dev/null
@@ -1,110 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Search;
-using Lucene.Net.Search.Spans;
-using System.Runtime.CompilerServices;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-
-     // Create basic queries to be used during rewrite.
-     // The basic queries are TermQuery and SpanTermQuery.
-     // An exception can be thrown when too many of these are used.
-     // SpanTermQuery and TermQuery use IndexReader.termEnum(Term), which causes the buffer usage.
-     
-     // Use this class to limit the buffer usage for reading terms from an index.
-     // Default is 1024, the same as the max. number of subqueries for a BooleanQuery.
-
-
-
-    /// <summary>
-    /// Factory for creating basic term queries
-    /// </summary>
-    public class BasicQueryFactory
-    {
-        public BasicQueryFactory(int maxBasicQueries)
-        {
-            this.maxBasicQueries = maxBasicQueries;
-            this.queriesMade = 0;
-        }
-
-        public BasicQueryFactory()
-            : this(1024)
-        {
-        }
-
-        private int maxBasicQueries;
-        private int queriesMade;
-
-        public int NrQueriesMade { get { return queriesMade; } }
-        public int MaxBasicQueries { get { return maxBasicQueries; } }
-
-        public override string ToString()
-        {
-            return GetType().Name
-                + "(maxBasicQueries: " + maxBasicQueries
-                + ", queriesMade: " + queriesMade
-                + ")";
-        }
-
-        private bool AtMax
-        {
-            get { return queriesMade >= maxBasicQueries; }
-        }
-
-        [MethodImpl(MethodImplOptions.Synchronized)]
-        protected virtual void CheckMax()
-        {
-            if (AtMax)
-                throw new TooManyBasicQueries(MaxBasicQueries);
-            queriesMade++;
-        }
-
-        public TermQuery NewTermQuery(Term term)
-        {
-            CheckMax();
-            return new TermQuery(term);
-        }
-
-        public SpanTermQuery NewSpanTermQuery(Term term)
-        {
-            CheckMax();
-            return new SpanTermQuery(term);
-        }
-
-        public override int GetHashCode()
-        {
-            return GetType().GetHashCode() ^ (AtMax ? 7 : 31 * 32);
-        }
-
-        /// <summary>
-        /// Two BasicQueryFactory's are equal when they generate
-        /// the same types of basic queries, or both cannot generate queries anymore.
-        /// </summary>
-        /// <param name="obj"></param>
-        /// <returns></returns>
-        public override bool Equals(object obj)
-        {
-            if (!(obj is BasicQueryFactory))
-                return false;
-            BasicQueryFactory other = (BasicQueryFactory)obj;
-            return AtMax == other.AtMax;
-        }
-    }
-}


[02/50] [abbrv] lucenenet git commit: Ported tests for the QueryParser.Classic namespace and refactored QueryParserTestBase so the test runner will run all of the tests.

Posted by sy...@apache.org.
Ported tests for the QueryParser.Classic namespace and refactored QueryParserTestBase so the test runner will run all of the tests.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/6d711567
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/6d711567
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/6d711567

Branch: refs/heads/master
Commit: 6d711567c20492c89fc59be440be8a56d918235c
Parents: ca1a374
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 16:48:21 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:29:37 2016 +0700

----------------------------------------------------------------------
 .../Classic/TestMultiAnalyzer.cs                |  278 ++++
 .../Classic/TestMultiFieldQueryParser.cs        |  376 +++++
 .../Classic/TestMultiPhraseQueryParsing.cs      |  121 ++
 .../Classic/TestQueryParser.cs                  |  554 +++++++
 .../Lucene.Net.Tests.QueryParser.csproj         |   82 +
 .../Properties/AssemblyInfo.cs                  |   36 +
 .../Util/QueryParserTestBase.cs                 | 1499 ++++++++++++++++++
 Lucene.Net.Tests.QueryParser/packages.config    |    4 +
 Lucene.Net.sln                                  |   12 +
 9 files changed, 2962 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs b/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
new file mode 100644
index 0000000..fc1ce0c
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
@@ -0,0 +1,278 @@
+\ufeffusing System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using NUnit.Framework;
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    [TestFixture]
+    public class TestMultiAnalyzer_ : BaseTokenStreamTestCase
+    {
+
+        private static int multiToken = 0;
+
+        [Test]
+        public void TestMultiAnalyzer()
+        {
+
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "", new MultiAnalyzer());
+
+            // trivial, no multiple tokens:
+            assertEquals("foo", qp.Parse("foo").toString());
+            assertEquals("foo", qp.Parse("\"foo\"").toString());
+            assertEquals("foo foobar", qp.Parse("foo foobar").toString());
+            assertEquals("\"foo foobar\"", qp.Parse("\"foo foobar\"").toString());
+            assertEquals("\"foo foobar blah\"", qp.Parse("\"foo foobar blah\"").toString());
+
+            // two tokens at the same position:
+            assertEquals("(multi multi2) foo", qp.Parse("multi foo").toString());
+            assertEquals("foo (multi multi2)", qp.Parse("foo multi").toString());
+            assertEquals("(multi multi2) (multi multi2)", qp.Parse("multi multi").toString());
+            assertEquals("+(foo (multi multi2)) +(bar (multi multi2))",
+                qp.Parse("+(foo multi) +(bar multi)").toString());
+            assertEquals("+(foo (multi multi2)) field:\"bar (multi multi2)\"",
+                qp.Parse("+(foo multi) field:\"bar multi\"").toString());
+
+            // phrases:
+            assertEquals("\"(multi multi2) foo\"", qp.Parse("\"multi foo\"").toString());
+            assertEquals("\"foo (multi multi2)\"", qp.Parse("\"foo multi\"").toString());
+            assertEquals("\"foo (multi multi2) foobar (multi multi2)\"",
+                qp.Parse("\"foo multi foobar multi\"").toString());
+
+            // fields:
+            assertEquals("(field:multi field:multi2) field:foo", qp.Parse("field:multi field:foo").toString());
+            assertEquals("field:\"(multi multi2) foo\"", qp.Parse("field:\"multi foo\"").toString());
+
+            // three tokens at one position:
+            assertEquals("triplemulti multi3 multi2", qp.Parse("triplemulti").toString());
+            assertEquals("foo (triplemulti multi3 multi2) foobar",
+                qp.Parse("foo triplemulti foobar").toString());
+
+            // phrase with non-default slop:
+            assertEquals("\"(multi multi2) foo\"~10", qp.Parse("\"multi foo\"~10").toString());
+
+            // phrase with non-default boost:
+            assertEquals("\"(multi multi2) foo\"^2.0", qp.Parse("\"multi foo\"^2").toString());
+
+            // phrase after changing default slop
+            qp.PhraseSlop=(99);
+            assertEquals("\"(multi multi2) foo\"~99 bar",
+                         qp.Parse("\"multi foo\" bar").toString());
+            assertEquals("\"(multi multi2) foo\"~99 \"foo bar\"~2",
+                         qp.Parse("\"multi foo\" \"foo bar\"~2").toString());
+            qp.PhraseSlop=(0);
+
+            // non-default operator:
+            qp.DefaultOperator=(QueryParserBase.AND_OPERATOR);
+            assertEquals("+(multi multi2) +foo", qp.Parse("multi foo").toString());
+
+        }
+
+        [Test]
+        public void TestMultiAnalyzerWithSubclassOfQueryParser()
+        {
+
+            DumbQueryParser qp = new DumbQueryParser("", new MultiAnalyzer());
+            qp.PhraseSlop = (99); // modified default slop
+
+            // direct call to (super's) getFieldQuery to demonstrate difference
+            // between phrase and multiphrase with modified default slop
+            assertEquals("\"foo bar\"~99",
+                         qp.GetSuperFieldQuery("", "foo bar", true).toString());
+            assertEquals("\"(multi multi2) bar\"~99",
+                         qp.GetSuperFieldQuery("", "multi bar", true).toString());
+
+
+            // ask subclass to parse phrase with modified default slop
+            assertEquals("\"(multi multi2) foo\"~99 bar",
+                         qp.Parse("\"multi foo\" bar").toString());
+
+        }
+
+        [Test]
+        public void TestPosIncrementAnalyzer()
+        {
+            QueryParser qp = new QueryParser(LuceneVersion.LUCENE_40, "", new PosIncrementAnalyzer());
+            assertEquals("quick brown", qp.Parse("the quick brown").toString());
+            assertEquals("quick brown fox", qp.Parse("the quick brown fox").toString());
+        }
+
+        /// <summary>
+        /// Expands "multi" to "multi" and "multi2", both at the same position,
+        /// and expands "triplemulti" to "triplemulti", "multi3", and "multi2".  
+        /// </summary>
+        private class MultiAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(result, new TestFilter(result));
+            }
+        }
+
+        private sealed class TestFilter : TokenFilter
+        {
+
+            private string prevType;
+            private int prevStartOffset;
+            private int prevEndOffset;
+
+            private readonly ICharTermAttribute termAtt;
+            private readonly IPositionIncrementAttribute posIncrAtt;
+            private readonly IOffsetAttribute offsetAtt;
+            private readonly ITypeAttribute typeAtt;
+
+            public TestFilter(TokenStream @in)
+                : base(@in)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+                offsetAtt = AddAttribute<IOffsetAttribute>();
+                typeAtt = AddAttribute<ITypeAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (multiToken > 0)
+                {
+                    termAtt.SetEmpty().Append("multi" + (multiToken + 1));
+                    offsetAtt.SetOffset(prevStartOffset, prevEndOffset);
+                    typeAtt.Type = (prevType);
+                    posIncrAtt.PositionIncrement = (0);
+                    multiToken--;
+                    return true;
+                }
+                else
+                {
+                    bool next = input.IncrementToken();
+                    if (!next)
+                    {
+                        return false;
+                    }
+                    prevType = typeAtt.Type;
+                    prevStartOffset = offsetAtt.StartOffset();
+                    prevEndOffset = offsetAtt.EndOffset();
+                    string text = termAtt.toString();
+                    if (text.equals("triplemulti"))
+                    {
+                        multiToken = 2;
+                        return true;
+                    }
+                    else if (text.equals("multi"))
+                    {
+                        multiToken = 1;
+                        return true;
+                    }
+                    else
+                    {
+                        return true;
+                    }
+                }
+            }
+
+            public override void Reset()
+            {
+                base.Reset();
+                this.prevType = null;
+                this.prevStartOffset = 0;
+                this.prevEndOffset = 0;
+            }
+        }
+
+        /// <summary>
+        /// Analyzes "the quick brown" as: quick(incr=2) brown(incr=1).
+        /// Does not work correctly for input other than "the quick brown ...".
+        /// </summary>
+        private class PosIncrementAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(result, new TestPosIncrementFilter(result));
+            }
+        }
+
+        private sealed class TestPosIncrementFilter : TokenFilter
+        {
+            ICharTermAttribute termAtt;
+            IPositionIncrementAttribute posIncrAtt;
+
+            public TestPosIncrementFilter(TokenStream @in)
+                : base(@in)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+                posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                while (input.IncrementToken())
+                {
+                    if (termAtt.toString().equals("the"))
+                    {
+                        // stopword, do nothing
+                    }
+                    else if (termAtt.toString().equals("quick"))
+                    {
+                        posIncrAtt.PositionIncrement = (2);
+                        return true;
+                    }
+                    else
+                    {
+                        posIncrAtt.PositionIncrement = (1);
+                        return true;
+                    }
+                }
+                return false;
+            }
+        }
+
+        /// <summary>
+        /// a very simple subclass of QueryParser
+        /// </summary>
+        private sealed class DumbQueryParser : QueryParser
+        {
+            public DumbQueryParser(string f, Analyzer a)
+                : base(TEST_VERSION_CURRENT, f, a)
+            {
+            }
+
+            // expose super's version 
+            public Query GetSuperFieldQuery(string f, string t, bool quoted)
+            {
+                return base.GetFieldQuery(f, t, quoted);
+            }
+
+            // wrap super's version
+            protected override Query GetFieldQuery(string field, string queryText, bool quoted)
+            {
+                return new DumbQueryWrapper(GetSuperFieldQuery(field, queryText, quoted));
+            }
+        }
+
+        /// <summary>
+        /// A very simple wrapper to prevent instanceof checks but uses
+        /// the toString of the query it wraps.
+        /// </summary>
+        private sealed class DumbQueryWrapper : Query
+        {
+            private Query q;
+            public DumbQueryWrapper(Query q)
+            {
+                this.q = q;
+            }
+
+            public override string ToString(string field)
+            {
+                return q.ToString(field);
+            }
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
new file mode 100644
index 0000000..f233c02
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
@@ -0,0 +1,376 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestMultiFieldQueryParser : LuceneTestCase
+    {
+        /// <summary>
+        /// test stop words parsing for both the non static form, and for the 
+        /// corresponding static form (qtxt, fields[]).
+        /// </summary>
+        [Test]
+        public void TestStopwordsParsing()
+        {
+            AssertStopQueryEquals("one", "b:one t:one");
+            AssertStopQueryEquals("one stop", "b:one t:one");
+            AssertStopQueryEquals("one (stop)", "b:one t:one");
+            AssertStopQueryEquals("one ((stop))", "b:one t:one");
+            AssertStopQueryEquals("stop", "");
+            AssertStopQueryEquals("(stop)", "");
+            AssertStopQueryEquals("((stop))", "");
+        }
+
+        /// <summary>
+        /// verify parsing of query using a stopping analyzer  
+        /// </summary>
+        /// <param name="qtxt"></param>
+        /// <param name="expectedRes"></param>
+        private void AssertStopQueryEquals(string qtxt, string expectedRes)
+        {
+            string[] fields = { "b", "t" };
+            BooleanClause.Occur[] occur = new BooleanClause.Occur[] { BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD };
+            TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer();
+            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, a);
+
+            Query q = mfqp.Parse(qtxt);
+            assertEquals(expectedRes, q.toString());
+
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, qtxt, fields, occur, a);
+            assertEquals(expectedRes, q.toString());
+        }
+
+        [Test]
+        public void TestSimple()
+        {
+            string[] fields = { "b", "t" };
+            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()));
+
+            Query q = mfqp.Parse("one");
+            assertEquals("b:one t:one", q.toString());
+
+            q = mfqp.Parse("one two");
+            assertEquals("(b:one t:one) (b:two t:two)", q.toString());
+
+            q = mfqp.Parse("+one +two");
+            assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
+
+            q = mfqp.Parse("+one -two -three");
+            assertEquals("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.toString());
+
+            q = mfqp.Parse("one^2 two");
+            assertEquals("((b:one t:one)^2.0) (b:two t:two)", q.toString());
+
+            q = mfqp.Parse("one~ two");
+            assertEquals("(b:one~2 t:one~2) (b:two t:two)", q.toString());
+
+            q = mfqp.Parse("one~0.8 two^2");
+            assertEquals("(b:one~0 t:one~0) ((b:two t:two)^2.0)", q.toString());
+
+            q = mfqp.Parse("one* two*");
+            assertEquals("(b:one* t:one*) (b:two* t:two*)", q.toString());
+
+            q = mfqp.Parse("[a TO c] two");
+            assertEquals("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.toString());
+
+            q = mfqp.Parse("w?ldcard");
+            assertEquals("b:w?ldcard t:w?ldcard", q.toString());
+
+            q = mfqp.Parse("\"foo bar\"");
+            assertEquals("b:\"foo bar\" t:\"foo bar\"", q.toString());
+
+            q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
+            assertEquals("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.toString());
+
+            q = mfqp.Parse("\"foo bar\"~4");
+            assertEquals("b:\"foo bar\"~4 t:\"foo bar\"~4", q.toString());
+
+            // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
+            q = mfqp.Parse("b:\"foo bar\"~4");
+            assertEquals("b:\"foo bar\"~4", q.toString());
+
+            // make sure that terms which have a field are not touched:
+            q = mfqp.Parse("one f:two");
+            assertEquals("(b:one t:one) f:two", q.toString());
+
+            // AND mode:
+            mfqp.DefaultOperator = QueryParserBase.AND_OPERATOR;
+            q = mfqp.Parse("one two");
+            assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
+            q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
+            assertEquals("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.toString());
+        }
+
+        [Test]
+        public void TestBoostsSimple()
+        {
+            IDictionary<string, float> boosts = new Dictionary<string, float>();
+            boosts["b"] = (float)5;
+            boosts["t"] = (float)10;
+            string[] fields = { "b", "t" };
+            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()), boosts);
+
+
+            //Check for simple
+            Query q = mfqp.Parse("one");
+            assertEquals("b:one^5.0 t:one^10.0", q.toString());
+
+            //Check for AND
+            q = mfqp.Parse("one AND two");
+            assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q.toString());
+
+            //Check for OR
+            q = mfqp.Parse("one OR two");
+            assertEquals("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.toString());
+
+            //Check for AND and a field
+            q = mfqp.Parse("one AND two AND foo:test");
+            assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q.toString());
+
+            q = mfqp.Parse("one^3 AND two^4");
+            assertEquals("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", q.toString());
+        }
+
+        [Test]
+        public void TestStaticMethod1()
+        {
+            string[] fields = { "b", "t" };
+            string[] queries = { "one", "two" };
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, new MockAnalyzer(Random()));
+            assertEquals("b:one t:two", q.toString());
+
+            string[] queries2 = { "+one", "+two" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries2, fields, new MockAnalyzer(Random()));
+            assertEquals("(+b:one) (+t:two)", q.toString());
+
+            string[] queries3 = { "one", "+two" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries3, fields, new MockAnalyzer(Random()));
+            assertEquals("b:one (+t:two)", q.toString());
+
+            string[] queries4 = { "one +more", "+two" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries4, fields, new MockAnalyzer(Random()));
+            assertEquals("(b:one +b:more) (+t:two)", q.toString());
+
+            string[] queries5 = { "blah" };
+            try
+            {
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries5, fields, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e)
+            {
+                // expected exception, array length differs
+            }
+
+            // check also with stop words for this static form (qtxts[], fields[]).
+            TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer();
+
+            string[] queries6 = { "((+stop))", "+((stop))" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries6, fields, stopA);
+            assertEquals("", q.toString());
+
+            string[] queries7 = { "one ((+stop)) +more", "+((stop)) +two" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries7, fields, stopA);
+            assertEquals("(b:one +b:more) (+t:two)", q.toString());
+        }
+
+        [Test]
+        public void TestStaticMethod2()
+        {
+            string[] fields = { "b", "t" };
+            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+b:one -t:one", q.toString());
+
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
+
+            try
+            {
+                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e)
+            {
+                // expected exception, array length differs
+            }
+        }
+
+        [Test]
+        public void TestStaticMethod2Old()
+        {
+            string[] fields = { "b", "t" };
+            //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
+            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
+
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer(Random()));//, fields, flags, new MockAnalyzer(random));
+            assertEquals("+b:one -t:one", q.toString());
+
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
+
+            try
+            {
+                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e)
+            {
+                // expected exception, array length differs
+            }
+        }
+
+        [Test]
+        public void TestStaticMethod3()
+        {
+            string[] queries = { "one", "two", "three" };
+            string[] fields = { "f1", "f2", "f3" };
+            BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST,
+                BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD};
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+f1:one -f2:two f3:three", q.toString());
+
+            try
+            {
+                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e)
+            {
+                // expected exception, array length differs
+            }
+        }
+
+        [Test]
+        public void TestStaticMethod3Old()
+        {
+            string[] queries = { "one", "two" };
+            string[] fields = { "b", "t" };
+            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+b:one -t:two", q.toString());
+
+            try
+            {
+                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e)
+            {
+                // expected exception, array length differs
+            }
+        }
+
+        [Test]
+        public void TestAnalyzerReturningNull()
+        {
+            string[] fields = new string[] { "f1", "f2", "f3" };
+            MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new AnalyzerReturningNull());
+            Query q = parser.Parse("bla AND blo");
+            assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString());
+            // the following queries are not affected as their terms are not analyzed anyway:
+            q = parser.Parse("bla*");
+            assertEquals("f1:bla* f2:bla* f3:bla*", q.toString());
+            q = parser.Parse("bla~");
+            assertEquals("f1:bla~2 f2:bla~2 f3:bla~2", q.toString());
+            q = parser.Parse("[a TO c]");
+            assertEquals("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.toString());
+        }
+
+        [Test]
+        public void TestStopWordSearching()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            using (var ramDir = NewDirectory())
+            {
+                using (IndexWriter iw = new IndexWriter(ramDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)))
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("body", "blah the footest blah", Field.Store.NO));
+                    iw.AddDocument(doc);
+                }
+
+                MultiFieldQueryParser mfqp =
+                  new MultiFieldQueryParser(TEST_VERSION_CURRENT, new string[] { "body" }, analyzer);
+                mfqp.DefaultOperator = QueryParser.Operator.AND;
+                Query q = mfqp.Parse("the footest");
+                using (IndexReader ir = DirectoryReader.Open(ramDir))
+                {
+                    IndexSearcher @is = NewSearcher(ir);
+                    ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs;
+                    assertEquals(1, hits.Length);
+                }
+            }
+        }
+
+        private class AnalyzerReturningNull : Analyzer
+        {
+            MockAnalyzer stdAnalyzer = new MockAnalyzer(Random());
+
+            public AnalyzerReturningNull()
+                : base(PER_FIELD_REUSE_STRATEGY)
+            { }
+
+            public override System.IO.TextReader InitReader(string fieldName, TextReader reader)
+            {
+                if ("f1".equals(fieldName))
+                {
+                    // we don't use the reader, so close it:
+                    IOUtils.CloseWhileHandlingException(reader);
+                    // return empty reader, so MockTokenizer returns no tokens:
+                    return new StringReader("");
+                }
+                else
+                {
+                    return base.InitReader(fieldName, reader);
+                }
+            }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return stdAnalyzer.CreateComponents(fieldName, reader);
+            }
+        }
+
+        [Test]
+        public void TestSimpleRegex()
+        {
+            string[] fields = new string[] { "a", "b" };
+            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()));
+
+            BooleanQuery bq = new BooleanQuery(true);
+            bq.Add(new RegexpQuery(new Term("a", "[a-z][123]")), BooleanClause.Occur.SHOULD);
+            bq.Add(new RegexpQuery(new Term("b", "[a-z][123]")), BooleanClause.Occur.SHOULD);
+            assertEquals(bq, mfqp.Parse("/[a-z][123]/"));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs b/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
new file mode 100644
index 0000000..3aaa9b2
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
@@ -0,0 +1,121 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestMultiPhraseQueryParsing_ : LuceneTestCase
+    {
+        private class TokenAndPos
+        {
+            public readonly string token;
+            public readonly int pos;
+            public TokenAndPos(string token, int pos)
+            {
+                this.token = token;
+                this.pos = pos;
+            }
+        }
+
+        private class CannedAnalyzer : Analyzer
+        {
+            private readonly TokenAndPos[] tokens;
+
+            public CannedAnalyzer(TokenAndPos[] tokens)
+            {
+                this.tokens = tokens;
+            }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return new TokenStreamComponents(new CannedTokenizer(reader, tokens));
+            }
+        }
+
+        private class CannedTokenizer : Tokenizer
+        {
+            private readonly TokenAndPos[] tokens;
+            private int upto = 0;
+            private int lastPos = 0;
+            private readonly ICharTermAttribute termAtt;
+            private readonly IPositionIncrementAttribute posIncrAtt;
+
+            public CannedTokenizer(System.IO.TextReader reader, TokenAndPos[] tokens)
+                : base(reader)
+            {
+                this.tokens = tokens;
+                this.termAtt = AddAttribute<ICharTermAttribute>();
+                this.posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                ClearAttributes();
+                if (upto < tokens.Length)
+                {
+                    TokenAndPos token = tokens[upto++];
+                    termAtt.SetEmpty();
+                    termAtt.Append(token.token);
+                    posIncrAtt.PositionIncrement = (token.pos - lastPos);
+                    lastPos = token.pos;
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+            public override void Reset()
+            {
+                base.Reset();
+                this.upto = 0;
+                this.lastPos = 0;
+            }
+        }
+
+        [Test]
+        public void TestMultiPhraseQueryParsing()
+        {
+            TokenAndPos[] INCR_0_QUERY_TOKENS_AND = new TokenAndPos[]
+            {
+                new TokenAndPos("a", 0),
+                new TokenAndPos("1", 0),
+                new TokenAndPos("b", 1),
+                new TokenAndPos("1", 1),
+                new TokenAndPos("c", 2)
+            };
+
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new CannedAnalyzer(INCR_0_QUERY_TOKENS_AND));
+            Query q = qp.Parse("\"this text is acually ignored\"");
+            assertTrue("wrong query type!", q is MultiPhraseQuery);
+
+            MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery();
+            multiPhraseQuery.Add(new Term[] { new Term("field", "a"), new Term("field", "1") }, -1);
+            multiPhraseQuery.Add(new Term[] { new Term("field", "b"), new Term("field", "1") }, 0);
+            multiPhraseQuery.Add(new Term[] { new Term("field", "c") }, 1);
+
+            assertEquals(multiPhraseQuery, q);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
new file mode 100644
index 0000000..0130bca
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -0,0 +1,554 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.QueryParser.Flexible.Standard;
+using Lucene.Net.QueryParser.Util;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Diagnostics;
+using System.Globalization;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestQueryParser : QueryParserTestBase
+    {
+        /// <summary>
+        /// A QueryParser subclass that forbids fuzzy and wildcard queries by
+        /// throwing ParseException from the corresponding factory methods; used by
+        /// TestCustomQueryParserWildcard and TestCustomQueryParserFuzzy.
+        /// </summary>
+        public class QPTestParser : QueryParser
+        {
+            public QPTestParser(string f, Analyzer a)
+                : base(TEST_VERSION_CURRENT, f, a)
+            {
+            }
+
+            // Reject fuzzy queries outright instead of building one.
+            protected override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+            {
+                throw new ParseException("Fuzzy queries not allowed");
+            }
+
+            // Reject wildcard queries outright instead of building one.
+            protected override Query GetWildcardQuery(string field, string termStr)
+            {
+                throw new ParseException("Wildcard queries not allowed");
+            }
+
+        }
+
+        /// <summary>
+        /// Builds a QueryParser over the default field, defaulting to a simple
+        /// mock analyzer when none is supplied, with OR as the default operator.
+        /// </summary>
+        public QueryParser GetParser(Analyzer a)
+        {
+            Analyzer analyzer = a ?? new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, DefaultField, analyzer);
+            parser.DefaultOperator = QueryParserBase.OR_OPERATOR;
+            return parser;
+        }
+
+        public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
+        {
+            return GetParser(a);
+        }
+
+        public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC != null, "Parameter must not be null");
+            Debug.Assert(cqpC is QueryParser, "Parameter must be instance of QueryParser");
+            QueryParser qp = (QueryParser)cqpC;
+            return qp.Parse(query);
+        }
+
+        public override Query GetQuery(string query, Analyzer a)
+        {
+            return GetParser(a).Parse(query);
+        }
+
+        public override bool IsQueryParserException(Exception exception)
+        {
+            return exception is ParseException;
+        }
+
+        public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC is QueryParser);
+            QueryParser qp = (QueryParser)cqpC;
+            qp.DefaultOperator = QueryParserBase.Operator.OR;
+        }
+
+        public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
+        {
+            Debug.Assert(cqpC is QueryParser);
+            QueryParser qp = (QueryParser)cqpC;
+            qp.DefaultOperator = QueryParserBase.Operator.AND;
+        }
+
+        public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
+        {
+            Debug.Assert(cqpC is QueryParser);
+            QueryParser qp = (QueryParser)cqpC;
+            qp.AnalyzeRangeTerms = (value);
+        }
+
+        public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
+        {
+            Debug.Assert(cqpC is QueryParser);
+            QueryParser qp = (QueryParser)cqpC;
+            qp.AutoGeneratePhraseQueries = value;
+        }
+
+        public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
+        {
+            Debug.Assert(cqpC is QueryParser);
+            QueryParser qp = (QueryParser)cqpC;
+            qp.SetDateResolution(field.toString(), value);
+        }
+
+        [Test]
+        public override void TestDefaultOperator()
+        {
+            QueryParser qp = GetParser(new MockAnalyzer(Random()));
+            // make sure OR is the default:
+            assertEquals(QueryParserBase.OR_OPERATOR, qp.DefaultOperator);
+            SetDefaultOperatorAND(qp);
+            assertEquals(QueryParserBase.AND_OPERATOR, qp.DefaultOperator);
+            SetDefaultOperatorOR(qp);
+            assertEquals(QueryParserBase.OR_OPERATOR, qp.DefaultOperator);
+        }
+
+        // LUCENE-2002: when we run javacc to regen QueryParser,
+        // we also run a replaceregexp step to fix 2 of the public
+        // ctors (change them to protected):
+        //
+        // protected QueryParser(CharStream stream)
+        //
+        // protected QueryParser(QueryParserTokenManager tm)
+        //
+        // This test is here as a safety, in case that ant step
+        // doesn't work for some reason.
+        [Test]
+        public void TestProtectedCtors()
+        {
+            // Unlike Java's Class.getConstructor(), which throws
+            // NoSuchMethodException, Type.GetConstructor() simply returns null when
+            // no matching *public* constructor exists. The original try/fail/catch
+            // port swallowed the Assert failure inside the catch(Exception) block,
+            // so this test could never fail; asserting the lookup returns null
+            // restores the intended check.
+            Assert.IsNull(
+                typeof(QueryParser).GetConstructor(new Type[] { typeof(ICharStream) }),
+                "please switch public QueryParser(CharStream) to be protected");
+            Assert.IsNull(
+                typeof(QueryParser).GetConstructor(new Type[] { typeof(QueryParserTokenManager) }),
+                "please switch public QueryParser(QueryParserTokenManager) to be protected");
+        }
+
+        /// <summary>
+        /// Parser that interprets a fuzzy slop ending in the Euro sign as a
+        /// half-width for a numeric range, e.g. "12.45~1\u20ac" parses like
+        /// a:[11.95 TO 12.95]; exercised by TestFuzzySlopeExtendability.
+        /// </summary>
+        private class TestFuzzySlopeExtendabilityQueryParser : QueryParser
+        {
+            public TestFuzzySlopeExtendabilityQueryParser()
+                : base(TEST_VERSION_CURRENT, "a", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
+            {}
+
+            protected override Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
+            {
+                if (fuzzySlop.image.EndsWith("\u20ac"))
+                {
+                    float fms = FuzzyMinSim;
+                    try
+                    {
+                        // Strip the leading '~' and the trailing Euro sign. Java's
+                        // substring(1, length - 1) takes an exclusive end index; the
+                        // C# equivalent takes a *length*, so it must be Length - 2.
+                        // (The original port's Length - 1 kept the Euro sign, so the
+                        // parse always failed and fms silently stayed at FuzzyMinSim.)
+                        fms = float.Parse(fuzzySlop.image.Substring(1, fuzzySlop.image.Length - 2), CultureInfo.InvariantCulture);
+                    }
+                    catch (Exception ignored) { }
+                    // Parse and format with the invariant culture so the test does
+                    // not depend on the machine's decimal separator.
+                    float value = float.Parse(termImage, CultureInfo.InvariantCulture);
+                    return GetRangeQuery(qfield,
+                        (value - fms / 2.0f).ToString(CultureInfo.InvariantCulture),
+                        (value + fms / 2.0f).ToString(CultureInfo.InvariantCulture),
+                        true, true);
+                }
+                return base.HandleBareFuzzy(qfield, fuzzySlop, termImage);
+            }
+        }
+
+        [Test]
+        public void TestFuzzySlopeExtendability()
+        {
+            QueryParser qp = new TestFuzzySlopeExtendabilityQueryParser();
+            assertEquals(qp.Parse("a:[11.95 TO 12.95]"), qp.Parse("12.45~1\u20ac"));
+        }
+
+        /// <summary>
+        /// Parser that records which factory method handled the query in type[0]
+        /// (1 = wildcard, 2 = prefix, 3 = field) and bypasses the superclass
+        /// error checking; exercised by TestStarParsing.
+        /// </summary>
+        private class TestStarParsingQueryParser : QueryParser
+        {
+            // Single-element array so the value survives into the enclosing test
+            // (mirrors the Java original's final int[] idiom).
+            public readonly int[] type = new int[1];
+
+            public TestStarParsingQueryParser()
+                : base(TEST_VERSION_CURRENT, "field", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
+            { }
+
+            protected override Query GetWildcardQuery(string field, string termStr)
+            {
+                // override error checking of superclass
+                type[0] = 1;
+                return new TermQuery(new Index.Term(field, termStr));
+            }
+
+            protected override Query GetPrefixQuery(string field, string termStr)
+            {
+                // override error checking of superclass
+                type[0] = 2;
+                return new TermQuery(new Index.Term(field, termStr));
+            }
+
+            protected override Query GetFieldQuery(string field, string queryText, bool quoted)
+            {
+                type[0] = 3;
+                return base.GetFieldQuery(field, queryText, quoted);
+            }
+        }
+
+        [Test]
+        public override void TestStarParsing()
+        {
+            TestStarParsingQueryParser qp = new TestStarParsingQueryParser();
+
+            TermQuery tq;
+
+            tq = (TermQuery)qp.Parse("foo:zoo*");
+            assertEquals("zoo", tq.Term.Text());
+            assertEquals(2, qp.type[0]);
+
+            tq = (TermQuery)qp.Parse("foo:zoo*^2");
+            assertEquals("zoo", tq.Term.Text());
+            assertEquals(2, qp.type[0]);
+            assertEquals(tq.Boost, 2, 0);
+
+            tq = (TermQuery)qp.Parse("foo:*");
+            assertEquals("*", tq.Term.Text());
+            assertEquals(1, qp.type[0]); // could be a valid prefix query in the future too
+
+            tq = (TermQuery)qp.Parse("foo:*^2");
+            assertEquals("*", tq.Term.Text());
+            assertEquals(1, qp.type[0]);
+            assertEquals(tq.Boost, 2, 0);
+
+            tq = (TermQuery)qp.Parse("*:foo");
+            assertEquals("*", tq.Term.Field);
+            assertEquals("foo", tq.Term.Text());
+            assertEquals(3, qp.type[0]);
+
+            tq = (TermQuery)qp.Parse("*:*");
+            assertEquals("*", tq.Term.Field);
+            assertEquals("*", tq.Term.Text());
+            assertEquals(1, qp.type[0]); // could be handled as a prefix query in the
+            // future
+
+            tq = (TermQuery)qp.Parse("(*:*)");
+            assertEquals("*", tq.Term.Field);
+            assertEquals("*", tq.Term.Text());
+            assertEquals(1, qp.type[0]);
+        }
+
+        [Test]
+        public void TestCustomQueryParserWildcard()
+        {
+            try
+            {
+                new QPTestParser("contents", new MockAnalyzer(Random(),
+                    MockTokenizer.WHITESPACE, false)).Parse("a?t");
+                fail("Wildcard queries should not be allowed");
+            }
+            catch (ParseException expected)
+            {
+                // expected exception
+            }
+        }
+
+        [Test]
+        public void TestCustomQueryParserFuzzy()
+        {
+            try
+            {
+                new QPTestParser("contents", new MockAnalyzer(Random(),
+                    MockTokenizer.WHITESPACE, false)).Parse("xunit~");
+                fail("Fuzzy queries should not be allowed");
+            }
+            catch (ParseException expected)
+            {
+                // expected exception
+            }
+        }
+
+        /// <summary>
+        /// query parser that doesn't expand synonyms when users use double quotes
+        /// </summary>
+        private class SmartQueryParser : QueryParser
+        {
+            Analyzer morePrecise = new Analyzer2();
+
+            public SmartQueryParser()
+                : base(TEST_VERSION_CURRENT, "field", new Analyzer1())
+            {
+            }
+
+            protected override Query GetFieldQuery(string field, string queryText, bool quoted)
+            {
+                if (quoted) return NewFieldQuery(morePrecise, field, queryText, quoted);
+                else return base.GetFieldQuery(field, queryText, quoted);
+            }
+        }
+
+        /// <summary>
+        /// Synonym expansion: the plain parser expands synonyms even inside
+        /// quotes, while SmartQueryParser routes quoted queries through a more
+        /// precise analyzer so they stay unexpanded.
+        /// </summary>
+        [Test] // attribute was missing in the port; sibling overrides (e.g. TestDefaultOperator) carry it
+        public override void TestNewFieldQuery()
+        {
+            // ordinary behavior, synonyms form uncoordinated boolean query
+            QueryParser dumb = new QueryParser(TEST_VERSION_CURRENT, "field",
+                new Analyzer1());
+            BooleanQuery expanded = new BooleanQuery(true);
+            expanded.Add(new TermQuery(new Index.Term("field", "dogs")),
+                BooleanClause.Occur.SHOULD);
+            expanded.Add(new TermQuery(new Index.Term("field", "dog")),
+                BooleanClause.Occur.SHOULD);
+            assertEquals(expanded, dumb.Parse("\"dogs\""));
+            // even with the phrase operator the behavior is the same
+            assertEquals(expanded, dumb.Parse("dogs"));
+
+            // custom behavior: the synonyms are expanded, unless the quote operator is used
+            QueryParser smart = new SmartQueryParser();
+            assertEquals(expanded, smart.Parse("dogs"));
+
+            Query unexpanded = new TermQuery(new Index.Term("field", "dogs"));
+            assertEquals(unexpanded, smart.Parse("\"dogs\""));
+        }
+
+        // LUCENETODO: fold these into QueryParserTestBase
+
+        /// <summary>
+        /// adds synonym of "dog" for "dogs".
+        /// </summary>
+        public class MockSynonymAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                MockTokenizer tokenizer = new MockTokenizer(reader);
+                return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
+            }
+        }
+
+        /// <summary>
+        /// simple synonyms test
+        /// </summary>
+        [Test]
+        public void TestSynonyms()
+        {
+            BooleanQuery expected = new BooleanQuery(true);
+            expected.Add(new TermQuery(new Index.Term("field", "dogs")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Index.Term("field", "dog")), BooleanClause.Occur.SHOULD);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("dogs"));
+            assertEquals(expected, qp.Parse("\"dogs\""));
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("dogs"));
+            assertEquals(expected, qp.Parse("\"dogs\""));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("dogs^2"));
+            assertEquals(expected, qp.Parse("\"dogs\"^2"));
+        }
+
+        /// <summary>
+        /// forms multiphrase query
+        /// </summary>
+        [Test]
+        public void TestSynonymsPhrase()
+        {
+            MultiPhraseQuery expected = new MultiPhraseQuery();
+            expected.Add(new Index.Term("field", "old"));
+            expected.Add(new Index.Term[] { new Index.Term("field", "dogs"), new Index.Term("field", "dog") });
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("\"old dogs\""));
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\"old dogs\""));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\"old dogs\"^2"));
+            expected.Slop = (3);
+            assertEquals(expected, qp.Parse("\"old dogs\"~3^2"));
+        }
+
+        /// <summary>
+        /// adds synonym of "\u570b" for "\u56fd".
+        /// </summary>
+        protected class MockCJKSynonymFilter : TokenFilter
+        {
+            ICharTermAttribute termAtt;
+            IPositionIncrementAttribute posIncAtt;
+            // true when the previous token was "\u56fd" and its synonym is pending
+            bool addSynonym = false;
+
+            public MockCJKSynonymFilter(TokenStream input)
+                : base(input)
+            {
+                // Assign the fields. The original port declared fresh locals here
+                // ("ICharTermAttribute termAtt = AddAttribute...;"), shadowing the
+                // fields and leaving them null, so IncrementToken threw
+                // NullReferenceException on first use.
+                termAtt = AddAttribute<ICharTermAttribute>();
+                posIncAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (addSynonym)
+                { // inject our synonym
+                    ClearAttributes();
+                    termAtt.SetEmpty().Append("\u570b");
+                    posIncAtt.PositionIncrement = (0);
+                    addSynonym = false;
+                    return true;
+                }
+
+                if (input.IncrementToken())
+                {
+                    // remember to emit the synonym on the next call
+                    addSynonym = termAtt.toString().equals("\u56fd");
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        }
+
+        protected class MockCJKSynonymAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer tokenizer = new SimpleCJKTokenizer(reader);
+                return new TokenStreamComponents(tokenizer, new MockCJKSynonymFilter(tokenizer));
+            }
+        }
+
+        /// <summary>
+        /// simple CJK synonym test
+        /// </summary>
+        [Test]
+        public void TestCJKSynonym()
+        {
+            BooleanQuery expected = new BooleanQuery(true);
+            expected.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("\u56fd"));
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u56fd^2"));
+        }
+
+        /// <summary>
+        /// synonyms with default OR operator 
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsOR()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
+            BooleanQuery inner = new BooleanQuery(true);
+            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.SHOULD);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd^2"));
+        }
+
+        /// <summary>
+        /// more complex synonyms with default OR operator
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsOR2()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
+            BooleanQuery inner = new BooleanQuery(true);
+            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.SHOULD);
+            BooleanQuery inner2 = new BooleanQuery(true);
+            inner2.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner2.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner2, BooleanClause.Occur.SHOULD);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd^2"));
+        }
+
+        /// <summary>
+        /// synonyms with default AND operator
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsAND()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.MUST);
+            BooleanQuery inner = new BooleanQuery(true);
+            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.MUST);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd^2"));
+        }
+
+        /// <summary>
+        /// more complex synonyms with default AND operator
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsAND2()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.MUST);
+            BooleanQuery inner = new BooleanQuery(true);
+            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.MUST);
+            BooleanQuery inner2 = new BooleanQuery(true);
+            inner2.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner2.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner2, BooleanClause.Occur.MUST);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd^2"));
+        }
+
+        [Test]
+        public void TestCJKSynonymsPhrase()
+        {
+            MultiPhraseQuery expected = new MultiPhraseQuery();
+            expected.Add(new Index.Term("field", "\u4e2d"));
+            expected.Add(new Index.Term[] { new Index.Term("field", "\u56fd"), new Index.Term("field", "\u570b") });
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\""));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\"^2"));
+            expected.Slop = (3);
+            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\"~3^2"));
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
new file mode 100644
index 0000000..99a5e91
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
@@ -0,0 +1,82 @@
+\ufeff<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Lucene.Net.Tests.QueryParser</RootNamespace>
+    <AssemblyName>Lucene.Net.Tests.QueryParser</AssemblyName>
+    <TargetFrameworkVersion>v4.5.1</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>bin\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>pdbonly</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>bin\Release\</OutputPath>
+    <DefineConstants>TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="nunit.framework, Version=2.6.3.13283, Culture=neutral, PublicKeyToken=96d09a1eb7f44a77, processorArchitecture=MSIL">
+      <HintPath>..\packages\NUnit.2.6.3\lib\nunit.framework.dll</HintPath>
+      <Private>True</Private>
+    </Reference>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+    <Reference Include="System.Xml.Linq" />
+    <Reference Include="System.Data.DataSetExtensions" />
+    <Reference Include="Microsoft.CSharp" />
+    <Reference Include="System.Data" />
+    <Reference Include="System.Xml" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="Classic\TestMultiFieldQueryParser.cs" />
+    <Compile Include="Classic\TestMultiPhraseQueryParsing.cs" />
+    <Compile Include="Classic\TestQueryParser.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="Classic\TestMultiAnalyzer.cs" />
+    <Compile Include="Util\QueryParserTestBase.cs" />
+  </ItemGroup>
+  <ItemGroup>
+    <None Include="packages.config" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj">
+      <Project>{949ba34b-6ae6-4ce3-b578-61e13e4d76bf}</Project>
+      <Name>Lucene.Net.QueryParser</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\src\Lucene.Net.Analysis.Common\Lucene.Net.Analysis.Common.csproj">
+      <Project>{4add0bbc-b900-4715-9526-d871de8eea64}</Project>
+      <Name>Lucene.Net.Analysis.Common</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\src\Lucene.Net.Core\Lucene.Net.csproj">
+      <Project>{5d4ad9be-1ffb-41ab-9943-25737971bf57}</Project>
+      <Name>Lucene.Net</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\src\Lucene.Net.TestFramework\Lucene.Net.TestFramework.csproj">
+      <Project>{b2c0d749-ce34-4f62-a15e-00cb2ff5ddb3}</Project>
+      <Name>Lucene.Net.TestFramework</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6d711567/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs b/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..549c7bf
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+\ufeffusing System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following 
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("Lucene.Net.Tests.QueryParser")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("Lucene.Net.Tests.QueryParser")]
+[assembly: AssemblyCopyright("Copyright ©  2016")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible 
+// to COM components.  If you need to access a type in this assembly from 
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("27d0ae76-3e51-454c-9c4a-f913fde0ed0a")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version 
+//      Build Number
+//      Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers 
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]


[11/50] [abbrv] lucenenet git commit: Added QueryParser.Analyzing namespace + tests.

Posted by sy...@apache.org.
Added QueryParser.Analyzing namespace + tests.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/11d74493
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/11d74493
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/11d74493

Branch: refs/heads/master
Commit: 11d7449396fb3fe3062482147b9228bf3615dd8e
Parents: 1937dda
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Aug 1 01:51:25 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:20 2016 +0700

----------------------------------------------------------------------
 .../Analyzing/AnalyzingQueryParser.cs           | 198 +++++++++++
 .../Lucene.Net.QueryParser.csproj               |   1 +
 .../Properties/AssemblyInfo.cs                  |   3 +
 .../Analyzing/TestAnalyzingQueryParser.cs       | 341 +++++++++++++++++++
 .../Classic/TestMultiAnalyzer.cs                |   2 +-
 .../Classic/TestQueryParser.cs                  |  14 +-
 .../Lucene.Net.Tests.QueryParser.csproj         |   1 +
 7 files changed, 552 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11d74493/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs b/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
new file mode 100644
index 0000000..8930aa4
--- /dev/null
+++ b/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
@@ -0,0 +1,198 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using System.Text;
+using System.Text.RegularExpressions;
+
+namespace Lucene.Net.QueryParser.Analyzing
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Overrides Lucene's default QueryParser so that Fuzzy-, Prefix-, Range-, and WildcardQuerys
+    /// are also passed through the given analyzer, but wildcard characters <code>*</code> and
+    /// <code>?</code> don't get removed from the search terms.
+    /// 
+    /// <p><b>Warning:</b> This class should only be used with analyzers that do not use stopwords
+    /// or that add tokens. Also, several stemming analyzers are inappropriate: for example, GermanAnalyzer 
+    /// will turn <code>H&auml;user</code> into <code>hau</code>, but <code>H?user</code> will 
+    /// become <code>h?user</code> when using this parser and thus no match would be found (i.e.
+    /// using this parser will be no improvement over QueryParser in such cases). 
+    /// </summary>
+    public class AnalyzingQueryParser : Classic.QueryParser
+    {
+        // gobble escaped chars or find a wildcard character 
+        private readonly Regex wildcardPattern = new Regex(@"(\\.)|([?*]+)", RegexOptions.Compiled);
+
+        public AnalyzingQueryParser(LuceneVersion matchVersion, string field, Analyzer analyzer)
+            : base(matchVersion, field, analyzer)
+        {
+            AnalyzeRangeTerms = true;
+        }
+
+        /// <summary>
+        /// Called when parser parses an input term
+        /// that uses prefix notation; that is, contains a single '*' wildcard
+        /// character as its last character. Since this is a special case
+        /// of generic wildcard term, and such a query can be optimized easily,
+        /// this usually results in a different query object.
+        /// <p>
+        /// Depending on analyzer and settings, a prefix term may (most probably will)
+        /// be lower-cased automatically. It <b>will</b> go through the default Analyzer.
+        /// <p>
+        /// Overrides super class, by passing terms through analyzer.
+        /// </summary>
+        /// <param name="field">Name of the field query will use.</param>
+        /// <param name="termStr">Term to use for building term for the query
+        /// (<b>without</b> trailing '*' character!)</param>
+        /// <returns>Resulting <see cref="Query"/> built for the term</returns>
+        protected internal override Query GetWildcardQuery(string field, string termStr)
+        {
+            if (termStr == null)
+            {
+                //can't imagine this would ever happen
+                throw new ParseException("Passed null value as term to getWildcardQuery");
+            }
+            if (!AllowLeadingWildcard && (termStr.StartsWith("*") || termStr.StartsWith("?")))
+            {
+                throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery"
+                                        + " unless getAllowLeadingWildcard() returns true");
+            }
+
+            Match wildcardMatcher = wildcardPattern.Match(termStr);
+            StringBuilder sb = new StringBuilder();
+            int last = 0; // end offset of the previous match; text between matches is analyzed
+
+            while (wildcardMatcher.Success)
+            {
+                // continue if escaped char
+                if (wildcardMatcher.Groups[1].Success)
+                {
+                    wildcardMatcher = wildcardMatcher.NextMatch();
+                    continue;
+                }
+
+                if (wildcardMatcher.Index > last)
+                {
+                    string chunk = termStr.Substring(last, wildcardMatcher.Index - last);
+                    string analyzed = AnalyzeSingleChunk(field, termStr, chunk);
+                    sb.Append(analyzed);
+                }
+
+                //append the wildcard character
+                sb.Append(wildcardMatcher.Groups[2]);
+
+                last = wildcardMatcher.Index + wildcardMatcher.Length;
+                wildcardMatcher = wildcardMatcher.NextMatch();
+            }
+            if (last < termStr.Length) // analyze the trailing chunk after the final wildcard
+            {
+                sb.Append(AnalyzeSingleChunk(field, termStr, termStr.Substring(last)));
+            }
+            return base.GetWildcardQuery(field, sb.ToString());
+        }
+
+        /// <summary>
+        /// Called when parser parses an input term that has the fuzzy suffix (~) appended.
+        /// <p>
+        /// Depending on analyzer and settings, a fuzzy term may (most probably will)
+        /// be lower-cased automatically. It <b>will</b> go through the default Analyzer.
+        /// <p>
+        /// Overrides super class, by passing terms through analyzer.
+        /// </summary>
+        /// <param name="field">Name of the field query will use.</param>
+        /// <param name="termStr">Term to use for building term for the query</param>
+        /// <param name="minSimilarity"></param>
+        /// <returns>Resulting <see cref="Query"/> built for the term</returns>
+        protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+        {
+            string analyzed = AnalyzeSingleChunk(field, termStr, termStr);
+            return base.GetFuzzyQuery(field, analyzed, minSimilarity);
+        }
+
+        /// <summary>
+        /// Returns the analyzed form for the given chunk
+        /// 
+        /// If the analyzer produces more than one output token from the given chunk,
+        /// a ParseException is thrown.
+        /// </summary>
+        /// <param name="field">The target field</param>
+        /// <param name="termStr">The full term from which the given chunk is excerpted</param>
+        /// <param name="chunk">The portion of the given termStr to be analyzed</param>
+        /// <returns>The result of analyzing the given chunk</returns>
+        /// <exception cref="ParseException">ParseException when analysis returns other than one output token</exception>
+        protected internal string AnalyzeSingleChunk(string field, string termStr, string chunk)
+        {
+            string analyzed = null;
+            TokenStream stream = null;
+            try
+            {
+                stream = Analyzer.TokenStream(field, chunk);
+                stream.Reset();
+                ICharTermAttribute termAtt = stream.GetAttribute<ICharTermAttribute>();
+                // get first and hopefully only output token
+                if (stream.IncrementToken())
+                {
+                    analyzed = termAtt.ToString();
+
+                    // try to increment again, there should only be one output token
+                    StringBuilder multipleOutputs = null;
+                    while (stream.IncrementToken())
+                    {
+                        if (null == multipleOutputs)
+                        {
+                            multipleOutputs = new StringBuilder();
+                            multipleOutputs.Append('"');
+                            multipleOutputs.Append(analyzed);
+                            multipleOutputs.Append('"');
+                        }
+                        multipleOutputs.Append(',');
+                        multipleOutputs.Append('"');
+                        multipleOutputs.Append(termAtt.ToString());
+                        multipleOutputs.Append('"');
+                    }
+                    stream.End();
+                    if (null != multipleOutputs)
+                    {
+                        throw new ParseException( // .NET composite format ({0}), not Java's %s, so the args actually render
+                            string.Format(Locale, "Analyzer created multiple terms for \"{0}\": {1}", chunk, multipleOutputs.ToString()));
+                    }
+                }
+                else
+                {
+                    // nothing returned by analyzer.  Was it a stop word and the user accidentally
+                    // used an analyzer with stop words?
+                    stream.End();
+                    throw new ParseException(string.Format(Locale, "Analyzer returned nothing for \"{0}\"", chunk));
+                }
+            }
+            catch (System.IO.IOException) // exception variable removed: it was unused (CS0168)
+            {
+                throw new ParseException(
+                    string.Format(Locale, "IO error while trying to analyze single term: \"{0}\"", termStr));
+            }
+            finally
+            {
+                IOUtils.CloseWhileHandlingException(stream);
+            }
+            return analyzed;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11d74493/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
index 9f1da43..2c0619c 100644
--- a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
+++ b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
@@ -39,6 +39,7 @@
     <Reference Include="System.Xml" />
   </ItemGroup>
   <ItemGroup>
+    <Compile Include="Analyzing\AnalyzingQueryParser.cs" />
     <Compile Include="Classic\CharStream.cs" />
     <Compile Include="Classic\FastCharStream.cs" />
     <Compile Include="Classic\MultiFieldQueryParser.cs" />

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11d74493/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs b/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
index 661ee8d..023bf34 100644
--- a/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
+++ b/Lucene.Net.QueryParser/Properties/AssemblyInfo.cs
@@ -22,6 +22,9 @@ using System.Runtime.InteropServices;
 // The following GUID is for the ID of the typelib if this project is exposed to COM
 [assembly: Guid("7c58cf05-89dd-4c02-a948-c28cdaf05247")]
 
+// for testing
+[assembly: InternalsVisibleTo("Lucene.Net.Tests.QueryParser")]
+
 // Version information for an assembly consists of the following four values:
 //
 //      Major Version

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11d74493/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs b/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
new file mode 100644
index 0000000..10756cf
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
@@ -0,0 +1,341 @@
+\ufeffusing System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParser.Analyzing
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestAnalyzingQueryParser : LuceneTestCase
+    {
+        private static readonly string FIELD = "field"; // conventional modifier order (static readonly)
+
+        private Analyzer a;
+
+        private string[] wildcardInput;
+        private string[] wildcardExpected;
+        private string[] prefixInput;
+        private string[] prefixExpected;
+        private string[] rangeInput;
+        private string[] rangeExpected;
+        private string[] fuzzyInput;
+        private string[] fuzzyExpected;
+
+        private IDictionary<string, string> wildcardEscapeHits = new Dictionary<string, string>();
+        private IDictionary<string, string> wildcardEscapeMisses = new Dictionary<string, string>();
+
+        public override void SetUp()
+        {
+            base.SetUp(); // non-ASCII literals below restored from upstream Lucene (were corrupted to U+FFFD)
+            wildcardInput = new string[] { "*bersetzung über*ung",
+                "Mötley Cr\u00fce Mötl?* Crü?", "Renée Zellweger Ren?? Zellw?ger" };
+            wildcardExpected = new string[] { "*bersetzung uber*ung", "motley crue motl?* cru?",
+                "renee zellweger ren?? zellw?ger" };
+
+            prefixInput = new string[] { "übersetzung übersetz*",
+                "Mötley Crüe Mötl* crü*", "René? Zellw*" };
+            prefixExpected = new string[] { "ubersetzung ubersetz*", "motley crue motl* cru*",
+                "rene? zellw*" };
+
+            rangeInput = new string[] { "[aa TO bb]", "{Anaïs TO Zoé}" };
+            rangeExpected = new string[] { "[aa TO bb]", "{anais TO zoe}" };
+
+            fuzzyInput = new string[] { "Übersetzung Übersetzung~0.9",
+                "Mötley Crüe Mötley~0.75 Crüe~0.5",
+                "Renée Zellweger Renée~0.9 Zellweger~" };
+            fuzzyExpected = new string[] { "ubersetzung ubersetzung~1",
+                "motley crue motley~1 crue~2", "renee zellweger renee~0 zellweger~2" };
+
+            wildcardEscapeHits["mö*tley"] = "moatley";
+
+            // need to have at least one genuine wildcard to trigger the wildcard analysis
+            // hence the * before the y
+            wildcardEscapeHits["mö\\*tl*y"] = "mo*tley";
+
+            // escaped backslash then true wildcard
+            wildcardEscapeHits["mö\\\\*tley"] = "mo\\atley";
+
+            // escaped wildcard then true wildcard
+            wildcardEscapeHits["mö\\??ley"] = "mo?tley";
+
+            // the first is an escaped * which should yield a miss
+            wildcardEscapeMisses["mö\\*tl*y"] = "moatley";
+
+            a = new ASCIIAnalyzer();
+        }
+
+        [Test]
+        public void TestSingleChunkExceptions()
+        {
+            bool ex = false;
+            string termStr = "the*tre";
+
+            Analyzer stopsAnalyzer = new MockAnalyzer
+                (Random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET);
+            try
+            {
+                string q = ParseWithAnalyzingQueryParser(termStr, stopsAnalyzer, true);
+            }
+            catch (ParseException e)
+            {
+                if (e.Message.Contains("returned nothing"))
+                {
+                    ex = true;
+                }
+            }
+            assertEquals("Should have returned nothing", true, ex);
+            ex = false;
+
+            AnalyzingQueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, a);
+            try
+            {
+                qp.AnalyzeSingleChunk(FIELD, "", "not a single chunk");
+            }
+            catch (ParseException e)
+            {
+                if (e.Message.Contains("multiple terms"))
+                {
+                    ex = true;
+                }
+            }
+            assertEquals("Should have produced multiple terms", true, ex);
+        }
+
+        [Test]
+        public void TestWildcardAlone()
+        {
+            //seems like crazy edge case, but can be useful in concordance 
+            bool pex = false;
+            try
+            {
+                Query q = GetAnalyzedQuery("*", a, false);
+            }
+            catch (ParseException) // variable removed: unused (CS0168)
+            {
+                pex = true;
+            }
+            assertEquals("Wildcard alone with allowWildcard=false", true, pex);
+
+            pex = false;
+            try
+            {
+                String qString = ParseWithAnalyzingQueryParser("*", a, true);
+                assertEquals("Every word", "*", qString);
+            }
+            catch (ParseException) // variable removed: unused (CS0168)
+            {
+                pex = true;
+            }
+
+            assertEquals("Wildcard alone with allowWildcard=true", false, pex);
+        }
+
+        [Test]
+        public void TestWildCardEscapes()
+        {
+            foreach (var entry in wildcardEscapeHits)
+            {
+                Query q = GetAnalyzedQuery(entry.Key, a, false);
+                assertEquals("WildcardEscapeHits: " + entry.Key, true, IsAHit(q, entry.Value, a));
+            }
+            foreach (var entry in wildcardEscapeMisses)
+            {
+                Query q = GetAnalyzedQuery(entry.Key, a, false);
+                assertEquals("WildcardEscapeMisses: " + entry.Key, false, IsAHit(q, entry.Value, a));
+            }
+        }
+
+        [Test]
+        public void TestWildCardQueryNoLeadingAllowed()
+        {
+            bool ex = false;
+            try
+            {
+                string q = ParseWithAnalyzingQueryParser(wildcardInput[0], a, false);
+
+            }
+            catch (ParseException) // variable removed: unused (CS0168)
+            {
+                ex = true;
+            }
+            assertEquals("Testing initial wildcard not allowed",
+                true, ex);
+        }
+
+        [Test]
+        public void TestWildCardQuery()
+        {
+            for (int i = 0; i < wildcardInput.Length; i++)
+            {
+                assertEquals("Testing wildcards with analyzer " + a.GetType() + ", input string: "
+                    + wildcardInput[i], wildcardExpected[i], ParseWithAnalyzingQueryParser(wildcardInput[i], a, true));
+            }
+        }
+
+        [Test]
+        public void TestPrefixQuery()
+        {
+            for (int i = 0; i < prefixInput.Length; i++)
+            {
+                assertEquals("Testing prefixes with analyzer " + a.GetType() + ", input string: "
+                    + prefixInput[i], prefixExpected[i], ParseWithAnalyzingQueryParser(prefixInput[i], a, false));
+            }
+        }
+
+        [Test]
+        public void TestRangeQuery()
+        {
+            for (int i = 0; i < rangeInput.Length; i++)
+            {
+                assertEquals("Testing ranges with analyzer " + a.GetType() + ", input string: "
+                    + rangeInput[i], rangeExpected[i], ParseWithAnalyzingQueryParser(rangeInput[i], a, false));
+            }
+        }
+
+        [Test]
+        public void TestFuzzyQuery()
+        {
+            for (int i = 0; i < fuzzyInput.Length; i++)
+            {
+                assertEquals("Testing fuzzys with analyzer " + a.GetType() + ", input string: "
+                  + fuzzyInput[i], fuzzyExpected[i], ParseWithAnalyzingQueryParser(fuzzyInput[i], a, false));
+            }
+        }
+
+
+        private string ParseWithAnalyzingQueryParser(string s, Analyzer a, bool allowLeadingWildcard)
+        {
+            Query q = GetAnalyzedQuery(s, a, allowLeadingWildcard);
+            return q.ToString(FIELD);
+        }
+
+        private Query GetAnalyzedQuery(string s, Analyzer a, bool allowLeadingWildcard)
+        {
+            AnalyzingQueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, a);
+            qp.AllowLeadingWildcard = allowLeadingWildcard;
+            Query q = qp.Parse(s);
+            return q;
+        }
+
+        internal sealed class FoldingFilter : TokenFilter
+        {
+            private readonly ICharTermAttribute termAtt;
+
+            public FoldingFilter(TokenStream input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                if (input.IncrementToken())
+                {
+                    char[] term = termAtt.Buffer();
+                    for (int i = 0; i < term.Length; i++)
+                        switch (term[i]) // distinct labels restored; corrupted file had four duplicate cases (CS0152)
+                        {
+                            case 'ü':
+                                term[i] = 'u';
+                                break;
+                            case 'ö':
+                                term[i] = 'o';
+                                break;
+                            case 'é':
+                                term[i] = 'e';
+                                break;
+                            case 'ï':
+                                term[i] = 'i';
+                                break;
+                        }
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        }
+
+        internal sealed class ASCIIAnalyzer : Analyzer
+        {
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer result = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+                return new TokenStreamComponents(result, new FoldingFilter(result));
+            }
+        }
+
+        // LUCENE-4176
+        [Test]
+        public void TestByteTerms()
+        {
+            string s = "\u0e40\u0e02";
+            Analyzer analyzer = new MockBytesAnalyzer();
+            Classic.QueryParser qp = new AnalyzingQueryParser(TEST_VERSION_CURRENT, FIELD, analyzer);
+            Query q = qp.Parse("[\u0e40\u0e02 TO \u0e40\u0e02]");
+            assertEquals(true, IsAHit(q, s, analyzer));
+        }
+
+        private bool IsAHit(Query q, string content, Analyzer analyzer)
+        {
+            int hits;
+            using (Directory ramDir = NewDirectory())
+            {
+                using (RandomIndexWriter writer = new RandomIndexWriter(Random(), ramDir, analyzer))
+                {
+                    Document doc = new Document();
+                    FieldType fieldType = new FieldType();
+                    fieldType.Indexed = (true);
+                    fieldType.Tokenized = (true);
+                    fieldType.Stored = (true);
+                    Field field = new Field(FIELD, content, fieldType);
+                    doc.Add(field);
+                    writer.AddDocument(doc);
+                }
+                using (DirectoryReader ir = DirectoryReader.Open(ramDir))
+                {
+                    IndexSearcher @is = new IndexSearcher(ir);
+
+                    hits = @is.Search(q, 10).TotalHits;
+                }
+            }
+            if (hits == 1)
+            {
+                return true;
+            }
+            else
+            {
+                return false;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11d74493/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs b/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
index c4f3a7b..350f181 100644
--- a/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestMultiAnalyzer.cs
@@ -250,7 +250,7 @@ namespace Lucene.Net.QueryParser.Classic
             }
 
             // wrap super's version
-            protected override Query GetFieldQuery(string field, string queryText, bool quoted)
+            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
             {
                 return new DumbQueryWrapper(GetSuperFieldQuery(field, queryText, quoted));
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11d74493/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index 235c8ae..369fe92 100644
--- a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -38,12 +38,12 @@ namespace Lucene.Net.QueryParser.Classic
             {
             }
 
-            protected override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+            protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
             {
                 throw new ParseException("Fuzzy queries not allowed");
             }
 
-            protected override Query GetWildcardQuery(string field, string termStr)
+            protected internal override Query GetWildcardQuery(string field, string termStr)
             {
                 throw new ParseException("Wildcard queries not allowed");
             }
@@ -177,7 +177,7 @@ namespace Lucene.Net.QueryParser.Classic
                 : base(TEST_VERSION_CURRENT, "a", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
             {}
 
-            protected override Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
+            protected internal override Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
             {
                 if (fuzzySlop.image.EndsWith("\u20ac"))
                 {
@@ -209,21 +209,21 @@ namespace Lucene.Net.QueryParser.Classic
                 : base(TEST_VERSION_CURRENT, "field", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
             { }
 
-            protected override Query GetWildcardQuery(string field, string termStr)
+            protected internal override Query GetWildcardQuery(string field, string termStr)
             {
                 // override error checking of superclass
                 type[0] = 1;
                 return new TermQuery(new Index.Term(field, termStr));
             }
 
-            protected override Query GetPrefixQuery(string field, string termStr)
+            protected internal override Query GetPrefixQuery(string field, string termStr)
             {
                 // override error checking of superclass
                 type[0] = 2;
                 return new TermQuery(new Index.Term(field, termStr));
             }
 
-            protected override Query GetFieldQuery(string field, string queryText, bool quoted)
+            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
             {
                 type[0] = 3;
                 return base.GetFieldQuery(field, queryText, quoted);
@@ -314,7 +314,7 @@ namespace Lucene.Net.QueryParser.Classic
             {
             }
 
-            protected override Query GetFieldQuery(string field, string queryText, bool quoted)
+            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
             {
                 if (quoted) return NewFieldQuery(morePrecise, field, queryText, quoted);
                 else return base.GetFieldQuery(field, queryText, quoted);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/11d74493/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
index 99a5e91..0f9e86c 100644
--- a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
+++ b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
@@ -43,6 +43,7 @@
     <Reference Include="System.Xml" />
   </ItemGroup>
   <ItemGroup>
+    <Compile Include="Analyzing\TestAnalyzingQueryParser.cs" />
     <Compile Include="Classic\TestMultiFieldQueryParser.cs" />
     <Compile Include="Classic\TestMultiPhraseQueryParsing.cs" />
     <Compile Include="Classic\TestQueryParser.cs" />


[38/50] [abbrv] lucenenet git commit: Added missing guard clause to SimpleQueryParser.DefaultOperator.

Posted by sy...@apache.org.
Added missing guard clause to SimpleQueryParser.DefaultOperator.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/7b7b6348
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/7b7b6348
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/7b7b6348

Branch: refs/heads/master
Commit: 7b7b6348506b50734dad58fb0d2099904510a13b
Parents: 387d985
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Tue Aug 2 18:32:52 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:59 2016 +0700

----------------------------------------------------------------------
 .../Simple/SimpleQueryParser.cs                       | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/7b7b6348/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
index 1029c8b..8964d56 100644
--- a/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
@@ -721,8 +721,18 @@ namespace Lucene.Net.QueryParser.Simple
         /// </summary>
         public virtual BooleanClause.Occur DefaultOperator
         {
-            get { return defaultOperator; }
-            set { defaultOperator = value; }
+            get 
+            { 
+                return defaultOperator; 
+            }
+            set 
+            {
+                if (value != BooleanClause.Occur.SHOULD && value != BooleanClause.Occur.MUST)
+                {
+                    throw new ArgumentException("invalid operator: only SHOULD or MUST are allowed");
+                }
+                defaultOperator = value; 
+            }
         }
 
 


[03/50] [abbrv] lucenenet git commit: Refactoring to ensure that the abstract members are available in the context of the base class test (otherwise many of the tests get a NotImplementedException on the QueryParserTestBase).

Posted by sy...@apache.org.
Refactoring to ensure that the abstract members are available in the context of the base class test (otherwise many of the tests get a NotImplementedException on the QueryParserTestBase).


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/d5c43723
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/d5c43723
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/d5c43723

Branch: refs/heads/master
Commit: d5c43723df31652ca9dd814f06f0a8ba7ea50853
Parents: 6d71156
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 17:12:18 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:29:42 2016 +0700

----------------------------------------------------------------------
 .../Classic/TestQueryParser.cs                  | 140 ++++++++++---------
 .../Util/QueryParserTestBase.cs                 |  42 ++++--
 2 files changed, 108 insertions(+), 74 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d5c43723/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index 0130bca..35acad2 100644
--- a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -50,71 +50,81 @@ namespace Lucene.Net.QueryParser.Classic
 
         }
 
-        public QueryParser GetParser(Analyzer a)
-        {
-            if (a == null) a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, DefaultField, a);
-            qp.DefaultOperator = (QueryParserBase.OR_OPERATOR);
-            return qp;
-        }
-
-        public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
-        {
-            return GetParser(a);
-        }
-
-        public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
-        {
-            Debug.Assert(cqpC != null, "Parameter must not be null");
-            Debug.Assert(cqpC is QueryParser, "Parameter must be instance of QueryParser");
-            QueryParser qp = (QueryParser)cqpC;
-            return qp.Parse(query);
-        }
-
-        public override Query GetQuery(string query, Analyzer a)
-        {
-            return GetParser(a).Parse(query);
-        }
-
-        public override bool IsQueryParserException(Exception exception)
-        {
-            return exception is ParseException;
-        }
-
-        public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
-        {
-            Debug.Assert(cqpC is QueryParser);
-            QueryParser qp = (QueryParser)cqpC;
-            qp.DefaultOperator = QueryParserBase.Operator.OR;
-        }
-
-        public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
-        {
-            Debug.Assert(cqpC is QueryParser);
-            QueryParser qp = (QueryParser)cqpC;
-            qp.DefaultOperator = QueryParserBase.Operator.AND;
-        }
-
-        public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
-        {
-            Debug.Assert(cqpC is QueryParser);
-            QueryParser qp = (QueryParser)cqpC;
-            qp.AnalyzeRangeTerms = (value);
-        }
-
-        public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
-        {
-            Debug.Assert(cqpC is QueryParser);
-            QueryParser qp = (QueryParser)cqpC;
-            qp.AutoGeneratePhraseQueries = value;
-        }
-
-        public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
-        {
-            Debug.Assert(cqpC is QueryParser);
-            QueryParser qp = (QueryParser)cqpC;
-            qp.SetDateResolution(field.toString(), value);
-        }
+        // Moved to QueryParserTestBase
+        //public QueryParser GetParser(Analyzer a)
+        //{
+        //    if (a == null) a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+        //    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, DefaultField, a);
+        //    qp.DefaultOperator = (QueryParserBase.OR_OPERATOR);
+        //    return qp;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
+        //{
+        //    return GetParser(a);
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
+        //{
+        //    Debug.Assert(cqpC != null, "Parameter must not be null");
+        //    Debug.Assert(cqpC is QueryParser, "Parameter must be instance of QueryParser");
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    return qp.Parse(query);
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override Query GetQuery(string query, Analyzer a)
+        //{
+        //    return GetParser(a).Parse(query);
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override bool IsQueryParserException(Exception exception)
+        //{
+        //    return exception is ParseException;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.DefaultOperator = QueryParserBase.Operator.OR;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.DefaultOperator = QueryParserBase.Operator.AND;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.AnalyzeRangeTerms = (value);
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.AutoGeneratePhraseQueries = value;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.SetDateResolution(field.toString(), value);
+        //}
 
         [Test]
         public override void TestDefaultOperator()

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d5c43723/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
index 40aa777..f8e290e 100644
--- a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
+++ b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
@@ -12,6 +12,7 @@ using Lucene.Net.Util.Automaton;
 using NUnit.Framework;
 using System;
 using System.Collections.Generic;
+using System.Diagnostics;
 using System.Globalization;
 using System.Linq;
 using System.Text;
@@ -112,58 +113,81 @@ namespace Lucene.Net.QueryParser.Util
             originalMaxClauses = BooleanQuery.MaxClauseCount;
         }
 
+        // Moved from TestQueryParser
+        public Classic.QueryParser GetParser(Analyzer a)
+        {
+            if (a == null) a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            Classic.QueryParser qp = new Classic.QueryParser(TEST_VERSION_CURRENT, DefaultField, a);
+            qp.DefaultOperator = (QueryParserBase.OR_OPERATOR);
+            return qp;
+        }
+
         // Moved to AbstractQueryParserTestBase
         public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
         {
-            throw new NotImplementedException();
+            return GetParser(a);
         }
 
         // Moved to AbstractQueryParserTestBase
         public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
         {
-            throw new NotImplementedException();
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.DefaultOperator = QueryParserBase.Operator.OR;
         }
 
         // Moved to AbstractQueryParserTestBase
         public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
         {
-            throw new NotImplementedException();
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.DefaultOperator = QueryParserBase.Operator.AND;
         }
 
         // Moved to AbstractQueryParserTestBase
         public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
         {
-            throw new NotImplementedException();
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.AnalyzeRangeTerms = (value);
         }
 
         // Moved to AbstractQueryParserTestBase
         public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
         {
-            throw new NotImplementedException();
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.AutoGeneratePhraseQueries = value;
         }
 
         // Moved to AbstractQueryParserTestBase
         public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
         {
-            throw new NotImplementedException();
+            Debug.Assert(cqpC is Classic.QueryParser);
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            qp.SetDateResolution(field.toString(), value);
         }
 
         // Moved to AbstractQueryParserTestBase
         public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
         {
-            throw new NotImplementedException();
+            Debug.Assert(cqpC != null, "Parameter must not be null");
+            Debug.Assert(cqpC is Classic.QueryParser, "Parameter must be instance of QueryParser");
+            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
+            return qp.Parse(query);
         }
 
         // Moved to AbstractQueryParserTestBase
         public override Query GetQuery(string query, Analyzer a)
         {
-            throw new NotImplementedException();
+            return GetParser(a).Parse(query);
         }
 
+
         // Moved to AbstractQueryParserTestBase
         public override bool IsQueryParserException(Exception exception)
         {
-            throw new NotImplementedException();
+            return exception is ParseException;
         }
 
         public Query GetQuery(string query)


[42/50] [abbrv] lucenenet git commit: Fixed ToStringUtils.Boost() for when the number to the left of the decimal is 0.

Posted by sy...@apache.org.
Fixed ToStringUtils.Boost() for when the number to the left of the decimal is 0.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/34284ee1
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/34284ee1
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/34284ee1

Branch: refs/heads/master
Commit: 34284ee1ec18cc5ba76b2bc0a060b682de53f13c
Parents: 66ab301
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Aug 7 19:41:27 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:31:19 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Util/ToStringUtils.cs           | 2 +-
 src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/34284ee1/src/Lucene.Net.Core/Util/ToStringUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/ToStringUtils.cs b/src/Lucene.Net.Core/Util/ToStringUtils.cs
index 3e1f938..2fcf898 100644
--- a/src/Lucene.Net.Core/Util/ToStringUtils.cs
+++ b/src/Lucene.Net.Core/Util/ToStringUtils.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.Util
             if (boost != 1.0f)
             {
                 // .NET compatibility fix
-                return "^" + boost.ToString(".0######", CultureInfo.InvariantCulture);
+                return "^" + boost.ToString("0.0######", CultureInfo.InvariantCulture);
             }
             else
                 return "";

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/34284ee1/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs b/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
index a48cd23..6839745 100644
--- a/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
+++ b/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
@@ -33,6 +33,8 @@ namespace Lucene.Net.Core.Util
             float boostFractional = 2.5f;
             float boostNonFractional = 5f;
             float boostLong = 1.111111111f;
+            float boostZeroNonFractional = 0f;
+            float boostZeroFractional = 0.123f;
 
             foreach (CultureInfo culture in CultureInfo.GetCultures(CultureTypes.SpecificCultures | CultureTypes.NeutralCultures))
             {
@@ -42,6 +44,8 @@ namespace Lucene.Net.Core.Util
                 assertEquals("^2.5", ToStringUtils.Boost(boostFractional));
                 assertEquals("^5.0", ToStringUtils.Boost(boostNonFractional));
                 assertEquals("^1.111111", ToStringUtils.Boost(boostLong));
+                assertEquals("^0.0", ToStringUtils.Boost(boostZeroNonFractional));
+                assertEquals("^0.123", ToStringUtils.Boost(boostZeroFractional));
             }
         }
     }


[28/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
deleted file mode 100644
index b879008..0000000
--- a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
+++ /dev/null
@@ -1,1523 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Tokenattributes;
-using Lucene.Net.Documents;
-using Lucene.Net.Index;
-using Lucene.Net.QueryParser.Classic;
-using Lucene.Net.QueryParser.Flexible.Standard;
-using Lucene.Net.Search;
-using Lucene.Net.Store;
-using Lucene.Net.Support;
-using Lucene.Net.Util;
-using Lucene.Net.Util.Automaton;
-using NUnit.Framework;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Globalization;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
-namespace Lucene.Net.QueryParser.Util
-{
-    /// <summary>
-    /// In .NET the abstact members were moved to AbstractQueryParserTestBase
-    /// because the Visual Studio test runner does not find or run tests in 
-    /// abstract classes.
-    /// </summary>
-    [TestFixture]
-    public class QueryParserTestBase : AbstractQueryParserTestBase
-    {
-        public static Analyzer qpAnalyzer;
-
-        [TestFixtureSetUp]
-        public static void BeforeClass()
-        {
-            qpAnalyzer = new QPTestAnalyzer();
-        }
-
-        [TestFixtureTearDown]
-        public static void AfterClass()
-        {
-            qpAnalyzer = null;
-        }
-
-        public sealed class QPTestFilter : TokenFilter
-        {
-            ICharTermAttribute termAtt;
-            IOffsetAttribute offsetAtt;
-
-            /**
-             * Filter which discards the token 'stop' and which expands the
-             * token 'phrase' into 'phrase1 phrase2'
-             */
-            public QPTestFilter(TokenStream @in)
-                : base(@in)
-            {
-                termAtt = AddAttribute<ICharTermAttribute>();
-                offsetAtt = AddAttribute<IOffsetAttribute>();
-            }
-
-            bool inPhrase = false;
-            int savedStart = 0, savedEnd = 0;
-
-            public override sealed bool IncrementToken()
-            {
-                if (inPhrase)
-                {
-                    inPhrase = false;
-                    ClearAttributes();
-                    termAtt.Append("phrase2");
-                    offsetAtt.SetOffset(savedStart, savedEnd);
-                    return true;
-                }
-                else
-                    while (input.IncrementToken())
-                    {
-                        if (termAtt.toString().Equals("phrase"))
-                        {
-                            inPhrase = true;
-                            savedStart = offsetAtt.StartOffset();
-                            savedEnd = offsetAtt.EndOffset();
-                            termAtt.SetEmpty().Append("phrase1");
-                            offsetAtt.SetOffset(savedStart, savedEnd);
-                            return true;
-                        }
-                        else if (!termAtt.toString().equals("stop"))
-                            return true;
-                    }
-                return false;
-            }
-        }
-
-        public sealed class QPTestAnalyzer : Analyzer
-        {
-            /// <summary>
-            /// Filters MockTokenizer with StopFilter.
-            /// </summary>
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
-                return new TokenStreamComponents(tokenizer, new QPTestFilter(tokenizer));
-            }
-        }
-
-        private int originalMaxClauses;
-
-        private string defaultField = "field";
-        public string DefaultField { get { return defaultField; } set { defaultField = value; } }
-
-        public override void SetUp()
-        {
-            base.SetUp();
-            originalMaxClauses = BooleanQuery.MaxClauseCount;
-        }
-
-        // Moved from TestQueryParser
-        public virtual Classic.QueryParser GetParser(Analyzer a)
-        {
-            if (a == null) a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
-            Classic.QueryParser qp = new Classic.QueryParser(TEST_VERSION_CURRENT, DefaultField, a);
-            qp.DefaultOperator = (QueryParserBase.OR_OPERATOR);
-            return qp;
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
-        {
-            return GetParser(a);
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
-        {
-            Debug.Assert(cqpC is Classic.QueryParser);
-            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
-            qp.DefaultOperator = QueryParserBase.Operator.OR;
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
-        {
-            Debug.Assert(cqpC is Classic.QueryParser);
-            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
-            qp.DefaultOperator = QueryParserBase.Operator.AND;
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
-        {
-            Debug.Assert(cqpC is Classic.QueryParser);
-            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
-            qp.AnalyzeRangeTerms = (value);
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
-        {
-            Debug.Assert(cqpC is Classic.QueryParser);
-            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
-            qp.AutoGeneratePhraseQueries = value;
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
-        {
-            Debug.Assert(cqpC is Classic.QueryParser);
-            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
-            qp.SetDateResolution(field.toString(), value);
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
-        {
-            Debug.Assert(cqpC != null, "Parameter must not be null");
-            Debug.Assert(cqpC is Classic.QueryParser, "Parameter must be instance of QueryParser");
-            Classic.QueryParser qp = (Classic.QueryParser)cqpC;
-            return qp.Parse(query);
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override Query GetQuery(string query, Analyzer a)
-        {
-            return GetParser(a).Parse(query);
-        }
-
-
-        // Moved to AbstractQueryParserTestBase
-        public override bool IsQueryParserException(Exception exception)
-        {
-            return exception is ParseException;
-        }
-
-        public Query GetQuery(string query)
-        {
-            return GetQuery(query, (Analyzer)null);
-        }
-
-        public void AssertQueryEquals(string query, Analyzer a, string result)
-        {
-            Query q = GetQuery(query, a);
-            string s = q.ToString("field");
-            if (!s.equals(result))
-            {
-                fail("Query /" + query + "/ yielded /" + s
-                     + "/, expecting /" + result + "/");
-            }
-        }
-
-        public void AssertQueryEquals(ICommonQueryParserConfiguration cqpC, string field, string query, string result)
-        {
-            Query q = GetQuery(query, cqpC);
-            string s = q.ToString(field);
-            if (!s.Equals(result))
-            {
-                fail("Query /" + query + "/ yielded /" + s
-                     + "/, expecting /" + result + "/");
-            }
-        }
-
-        public void AssertEscapedQueryEquals(string query, Analyzer a, string result)
-        {
-            string escapedQuery = QueryParserBase.Escape(query);
-            if (!escapedQuery.Equals(result))
-            {
-                fail("Query /" + query + "/ yielded /" + escapedQuery
-                    + "/, expecting /" + result + "/");
-            }
-        }
-
-        public void AssertWildcardQueryEquals(string query, bool lowercase, string result, bool allowLeadingWildcard)
-        {
-            ICommonQueryParserConfiguration cqpC = GetParserConfig(null);
-            cqpC.LowercaseExpandedTerms = lowercase;
-            cqpC.AllowLeadingWildcard = allowLeadingWildcard;
-            Query q = GetQuery(query, cqpC);
-            string s = q.ToString("field");
-            if (!s.equals(result))
-            {
-                fail("WildcardQuery /" + query + "/ yielded /" + s
-                     + "/, expecting /" + result + "/");
-            }
-        }
-
-        public void AssertWildcardQueryEquals(string query, bool lowercase, string result)
-        {
-            AssertWildcardQueryEquals(query, lowercase, result, false);
-        }
-
-        public void AssertWildcardQueryEquals(string query, string result)
-        {
-            Query q = GetQuery(query);
-            string s = q.ToString("field");
-            if (!s.Equals(result))
-            {
-                fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
-                    + result + "/");
-            }
-        }
-
-        public Query GetQueryDOA(string query, Analyzer a)
-        {
-            if (a == null)
-                a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
-            ICommonQueryParserConfiguration qp = GetParserConfig(a);
-            SetDefaultOperatorAND(qp);
-            return GetQuery(query, qp);
-        }
-
-        public void AssertQueryEqualsDOA(string query, Analyzer a, string result)
-        {
-            Query q = GetQueryDOA(query, a);
-            string s = q.ToString("field");
-            if (!s.Equals(result))
-            {
-                fail("Query /" + query + "/ yielded /" + s
-                     + "/, expecting /" + result + "/");
-            }
-        }
-
-        [Test]
-        public void TestCJK()
-        {
-            // Test Ideographic Space - As wide as a CJK character cell (fullwidth)
-            // used google to translate the word "term" to japanese -> \u7528\u8a9e
-            AssertQueryEquals("term\u3000term\u3000term", null, "term\u0020term\u0020term");
-            AssertQueryEquals("\u7528\u8a9e\u3000\u7528\u8a9e\u3000\u7528\u8a9e", null, "\u7528\u8a9e\u0020\u7528\u8a9e\u0020\u7528\u8a9e");
-        }
-
-        protected class SimpleCJKTokenizer : Tokenizer
-        {
-            private ICharTermAttribute termAtt;
-
-            public SimpleCJKTokenizer(System.IO.TextReader input)
-                : base(input)
-            {
-                termAtt = AddAttribute<ICharTermAttribute>();
-            }
-
-            public override sealed bool IncrementToken()
-            {
-                int ch = input.Read();
-                if (ch < 0)
-                    return false;
-                ClearAttributes();
-                termAtt.SetEmpty().Append((char)ch);
-                return true;
-            }
-        }
-
-        private class SimpleCJKAnalyzer : Analyzer
-        {
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                return new TokenStreamComponents(new SimpleCJKTokenizer(reader));
-            }
-        }
-
-        [Test]
-        public void TestCJKTerm()
-        {
-            // individual CJK chars as terms
-            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
-
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, GetQuery("\u4e2d\u56fd", analyzer));
-        }
-
-        [Test]
-        public void TestCJKBoostedTerm()
-        {
-            // individual CJK chars as terms
-            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
-
-            BooleanQuery expected = new BooleanQuery();
-            expected.Boost = (0.5f);
-            expected.Add(new TermQuery(new Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, GetQuery("\u4e2d\u56fd^0.5", analyzer));
-        }
-
-        [Test]
-        public void TestCJKPhrase()
-        {
-            // individual CJK chars as terms
-            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
-
-            PhraseQuery expected = new PhraseQuery();
-            expected.Add(new Term("field", "\u4e2d"));
-            expected.Add(new Term("field", "\u56fd"));
-
-            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"", analyzer));
-        }
-
-        [Test]
-        public void TestCJKBoostedPhrase()
-        {
-            // individual CJK chars as terms
-            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
-
-            PhraseQuery expected = new PhraseQuery();
-            expected.Boost = (0.5f);
-            expected.Add(new Term("field", "\u4e2d"));
-            expected.Add(new Term("field", "\u56fd"));
-
-            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"^0.5", analyzer));
-        }
-
-        [Test]
-        public void TestCJKSloppyPhrase()
-        {
-            // individual CJK chars as terms
-            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
-
-            PhraseQuery expected = new PhraseQuery();
-            expected.Slop = (3);
-            expected.Add(new Term("field", "\u4e2d"));
-            expected.Add(new Term("field", "\u56fd"));
-
-            assertEquals(expected, GetQuery("\"\u4e2d\u56fd\"~3", analyzer));
-        }
-
-        [Test]
-        public void TestAutoGeneratePhraseQueriesOn()
-        {
-            // individual CJK chars as terms
-            SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();
-
-            PhraseQuery expected = new PhraseQuery();
-            expected.Add(new Term("field", "\u4e2d"));
-            expected.Add(new Term("field", "\u56fd"));
-            ICommonQueryParserConfiguration qp = GetParserConfig(analyzer);
-            SetAutoGeneratePhraseQueries(qp, true);
-            assertEquals(expected, GetQuery("\u4e2d\u56fd", qp));
-        }
-
-        [Test]
-        public void TestSimple()
-        {
-            AssertQueryEquals("term term term", null, "term term term");
-            AssertQueryEquals("türm term term", new MockAnalyzer(Random()), "türm term term");
-            AssertQueryEquals("Ümlaut", new MockAnalyzer(Random()), "Ümlaut");
-
-            // FIXME: enhance MockAnalyzer to be able to support this
-            // it must no longer extend CharTokenizer
-            //AssertQueryEquals("\"\"", new KeywordAnalyzer(), "");
-            //AssertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
-
-            AssertQueryEquals("a AND b", null, "+a +b");
-            AssertQueryEquals("(a AND b)", null, "+a +b");
-            AssertQueryEquals("c OR (a AND b)", null, "c (+a +b)");
-            AssertQueryEquals("a AND NOT b", null, "+a -b");
-            AssertQueryEquals("a AND -b", null, "+a -b");
-            AssertQueryEquals("a AND !b", null, "+a -b");
-            AssertQueryEquals("a && b", null, "+a +b");
-            //    AssertQueryEquals("a && ! b", null, "+a -b");
-
-            AssertQueryEquals("a OR b", null, "a b");
-            AssertQueryEquals("a || b", null, "a b");
-            AssertQueryEquals("a OR !b", null, "a -b");
-            //    AssertQueryEquals("a OR ! b", null, "a -b");
-            AssertQueryEquals("a OR -b", null, "a -b");
-
-            AssertQueryEquals("+term -term term", null, "+term -term term");
-            AssertQueryEquals("foo:term AND field:anotherTerm", null,
-                              "+foo:term +anotherterm");
-            AssertQueryEquals("term AND \"phrase phrase\"", null,
-                              "+term +\"phrase phrase\"");
-            AssertQueryEquals("\"hello there\"", null, "\"hello there\"");
-            assertTrue(GetQuery("a AND b") is BooleanQuery);
-            assertTrue(GetQuery("hello") is TermQuery);
-            assertTrue(GetQuery("\"hello there\"") is PhraseQuery);
-
-            AssertQueryEquals("germ term^2.0", null, "germ term^2.0");
-            AssertQueryEquals("(term)^2.0", null, "term^2.0");
-            AssertQueryEquals("(germ term)^2.0", null, "(germ term)^2.0");
-            AssertQueryEquals("term^2.0", null, "term^2.0");
-            AssertQueryEquals("term^2", null, "term^2.0");
-            AssertQueryEquals("\"germ term\"^2.0", null, "\"germ term\"^2.0");
-            AssertQueryEquals("\"term germ\"^2", null, "\"term germ\"^2.0");
-
-            AssertQueryEquals("(foo OR bar) AND (baz OR boo)", null,
-                              "+(foo bar) +(baz boo)");
-            AssertQueryEquals("((a OR b) AND NOT c) OR d", null,
-                              "(+(a b) -c) d");
-            AssertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null,
-                              "+(apple \"steve jobs\") -(foo bar baz)");
-            AssertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null,
-                              "+(title:dog title:cat) -author:\"bob dole\"");
-
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override void TestDefaultOperator()
-        {
-            throw new NotImplementedException();
-        }
-
-        private class OperatorVsWhitespaceAnalyzer : Analyzer
-        {
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
-            }
-        }
-
-        [Test]
-        public void TestOperatorVsWhitespace()
-        { //LUCENE-2566
-            // +,-,! should be directly adjacent to operand (i.e. not separated by whitespace) to be treated as an operator
-            Analyzer a = new OperatorVsWhitespaceAnalyzer();
-            AssertQueryEquals("a - b", a, "a - b");
-            AssertQueryEquals("a + b", a, "a + b");
-            AssertQueryEquals("a ! b", a, "a ! b");
-        }
-
-        [Test]
-        public void TestPunct()
-        {
-            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
-            AssertQueryEquals("a&b", a, "a&b");
-            AssertQueryEquals("a&&b", a, "a&&b");
-            AssertQueryEquals(".NET", a, ".NET");
-        }
-
-        [Test]
-        public void TestSlop()
-        {
-            AssertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
-            AssertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork");
-            AssertQueryEquals("\"term\"~2", null, "term");
-            AssertQueryEquals("\" \"~2 germ", null, "germ");
-            AssertQueryEquals("\"term germ\"~2^2", null, "\"term germ\"~2^2.0");
-        }
-
-        [Test]
-        public void TestNumber()
-        {
-            // The numbers go away because SimpleAnalzyer ignores them
-            AssertQueryEquals("3", null, "");
-            AssertQueryEquals("term 1.0 1 2", null, "term");
-            AssertQueryEquals("term term1 term2", null, "term term term");
-
-            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, true);
-            AssertQueryEquals("3", a, "3");
-            AssertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
-            AssertQueryEquals("term term1 term2", a, "term term1 term2");
-        }
-
-        [Test]
-        public void TestWildcard()
-        {
-            AssertQueryEquals("term*", null, "term*");
-            AssertQueryEquals("term*^2", null, "term*^2.0");
-            AssertQueryEquals("term~", null, "term~2");
-            AssertQueryEquals("term~1", null, "term~1");
-            AssertQueryEquals("term~0.7", null, "term~1");
-            AssertQueryEquals("term~^3", null, "term~2^3.0");
-            AssertQueryEquals("term^3~", null, "term~2^3.0");
-            AssertQueryEquals("term*germ", null, "term*germ");
-            AssertQueryEquals("term*germ^3", null, "term*germ^3.0");
-
-            assertTrue(GetQuery("term*") is PrefixQuery);
-            assertTrue(GetQuery("term*^2") is PrefixQuery);
-            assertTrue(GetQuery("term~") is FuzzyQuery);
-            assertTrue(GetQuery("term~0.7") is FuzzyQuery);
-            FuzzyQuery fq = (FuzzyQuery)GetQuery("term~0.7");
-            assertEquals(1, fq.MaxEdits);
-            assertEquals(FuzzyQuery.DefaultPrefixLength, fq.PrefixLength);
-            fq = (FuzzyQuery)GetQuery("term~");
-            assertEquals(2, fq.MaxEdits);
-            assertEquals(FuzzyQuery.DefaultPrefixLength, fq.PrefixLength);
-
-            AssertParseException("term~1.1"); // value > 1, throws exception
-
-            assertTrue(GetQuery("term*germ") is WildcardQuery);
-
-            /* Tests to see that wild card terms are (or are not) properly
-               * lower-cased with propery parser configuration
-               */
-            // First prefix queries:
-            // by default, convert to lowercase:
-            AssertWildcardQueryEquals("Term*", true, "term*");
-            // explicitly set lowercase:
-            AssertWildcardQueryEquals("term*", true, "term*");
-            AssertWildcardQueryEquals("Term*", true, "term*");
-            AssertWildcardQueryEquals("TERM*", true, "term*");
-            // explicitly disable lowercase conversion:
-            AssertWildcardQueryEquals("term*", false, "term*");
-            AssertWildcardQueryEquals("Term*", false, "Term*");
-            AssertWildcardQueryEquals("TERM*", false, "TERM*");
-            // Then 'full' wildcard queries:
-            // by default, convert to lowercase:
-            AssertWildcardQueryEquals("Te?m", "te?m");
-            // explicitly set lowercase:
-            AssertWildcardQueryEquals("te?m", true, "te?m");
-            AssertWildcardQueryEquals("Te?m", true, "te?m");
-            AssertWildcardQueryEquals("TE?M", true, "te?m");
-            AssertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
-            // explicitly disable lowercase conversion:
-            AssertWildcardQueryEquals("te?m", false, "te?m");
-            AssertWildcardQueryEquals("Te?m", false, "Te?m");
-            AssertWildcardQueryEquals("TE?M", false, "TE?M");
-            AssertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
-            //  Fuzzy queries:
-            AssertWildcardQueryEquals("Term~", "term~2");
-            AssertWildcardQueryEquals("Term~", true, "term~2");
-            AssertWildcardQueryEquals("Term~", false, "Term~2");
-            //  Range queries:
-            AssertWildcardQueryEquals("[A TO C]", "[a TO c]");
-            AssertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
-            AssertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
-            // Test suffix queries: first disallow
-            try
-            {
-                AssertWildcardQueryEquals("*Term", true, "*term");
-            }
-            catch (Exception pe)
-            {
-                // expected exception
-                if (!IsQueryParserException(pe))
-                {
-                    fail();
-                }
-            }
-            try
-            {
-                AssertWildcardQueryEquals("?Term", true, "?term");
-                fail();
-            }
-            catch (Exception pe)
-            {
-                // expected exception
-                if (!IsQueryParserException(pe))
-                {
-                    fail();
-                }
-            }
-            // Test suffix queries: then allow
-            AssertWildcardQueryEquals("*Term", true, "*term", true);
-            AssertWildcardQueryEquals("?Term", true, "?term", true);
-        }
-
-        [Test]
-        public void TestLeadingWildcardType()
-        {
-            ICommonQueryParserConfiguration cqpC = GetParserConfig(null);
-            cqpC.AllowLeadingWildcard = (true);
-            assertEquals(typeof(WildcardQuery), GetQuery("t*erm*", cqpC).GetType());
-            assertEquals(typeof(WildcardQuery), GetQuery("?term*", cqpC).GetType());
-            assertEquals(typeof(WildcardQuery), GetQuery("*term*", cqpC).GetType());
-        }
-
-        [Test]
-        public void TestQPA()
-        {
-            AssertQueryEquals("term term^3.0 term", qpAnalyzer, "term term^3.0 term");
-            AssertQueryEquals("term stop^3.0 term", qpAnalyzer, "term term");
-
-            AssertQueryEquals("term term term", qpAnalyzer, "term term term");
-            AssertQueryEquals("term +stop term", qpAnalyzer, "term term");
-            AssertQueryEquals("term -stop term", qpAnalyzer, "term term");
-
-            AssertQueryEquals("drop AND (stop) AND roll", qpAnalyzer, "+drop +roll");
-            AssertQueryEquals("term +(stop) term", qpAnalyzer, "term term");
-            AssertQueryEquals("term -(stop) term", qpAnalyzer, "term term");
-
-            AssertQueryEquals("drop AND stop AND roll", qpAnalyzer, "+drop +roll");
-            AssertQueryEquals("term phrase term", qpAnalyzer,
-                              "term (phrase1 phrase2) term");
-            AssertQueryEquals("term AND NOT phrase term", qpAnalyzer,
-                              "+term -(phrase1 phrase2) term");
-            AssertQueryEquals("stop^3", qpAnalyzer, "");
-            AssertQueryEquals("stop", qpAnalyzer, "");
-            AssertQueryEquals("(stop)^3", qpAnalyzer, "");
-            AssertQueryEquals("((stop))^3", qpAnalyzer, "");
-            AssertQueryEquals("(stop^3)", qpAnalyzer, "");
-            AssertQueryEquals("((stop)^3)", qpAnalyzer, "");
-            AssertQueryEquals("(stop)", qpAnalyzer, "");
-            AssertQueryEquals("((stop))", qpAnalyzer, "");
-            assertTrue(GetQuery("term term term", qpAnalyzer) is BooleanQuery);
-            assertTrue(GetQuery("term +stop", qpAnalyzer) is TermQuery);
-
-            ICommonQueryParserConfiguration cqpc = GetParserConfig(qpAnalyzer);
-            SetDefaultOperatorAND(cqpc);
-            AssertQueryEquals(cqpc, "field", "term phrase term",
-                "+term +(+phrase1 +phrase2) +term");
-            AssertQueryEquals(cqpc, "field", "phrase",
-                "+phrase1 +phrase2");
-        }
-
-        [Test]
-        public void TestRange()
-        {
-            AssertQueryEquals("[ a TO z]", null, "[a TO z]");
-            AssertQueryEquals("[ a TO z}", null, "[a TO z}");
-            AssertQueryEquals("{ a TO z]", null, "{a TO z]");
-
-            assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)GetQuery("[ a TO z]")).GetRewriteMethod());
-
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true));
-
-            qp.MultiTermRewriteMethod=(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
-            assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery)GetQuery("[ a TO z]", qp)).GetRewriteMethod());
-
-            // test open ranges
-            AssertQueryEquals("[ a TO * ]", null, "[a TO *]");
-            AssertQueryEquals("[ * TO z ]", null, "[* TO z]");
-            AssertQueryEquals("[ * TO * ]", null, "[* TO *]");
-
-            // mixing exclude and include bounds
-            AssertQueryEquals("{ a TO z ]", null, "{a TO z]");
-            AssertQueryEquals("[ a TO z }", null, "[a TO z}");
-            AssertQueryEquals("{ a TO * ]", null, "{a TO *]");
-            AssertQueryEquals("[ * TO z }", null, "[* TO z}");
-
-            AssertQueryEquals("[ a TO z ]", null, "[a TO z]");
-            AssertQueryEquals("{ a TO z}", null, "{a TO z}");
-            AssertQueryEquals("{ a TO z }", null, "{a TO z}");
-            AssertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
-            AssertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
-            AssertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
-            AssertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
-            AssertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
-
-            AssertQueryEquals("[* TO Z]", null, "[* TO z]");
-            AssertQueryEquals("[A TO *]", null, "[a TO *]");
-            AssertQueryEquals("[* TO *]", null, "[* TO *]");
-        }
-
-        [Test]
-        public void TestRangeWithPhrase()
-        {
-            AssertQueryEquals("[\\* TO \"*\"]", null, "[\\* TO \\*]");
-            AssertQueryEquals("[\"*\" TO *]", null, "[\\* TO *]");
-        }
-
-        private string EscapeDateString(string s)
-        {
-            if (s.IndexOf(" ") > -1)
-            {
-                return "\"" + s + "\"";
-            }
-            else
-            {
-                return s;
-            }
-        }
-
-        /// <summary>for testing DateTools support</summary>
-        private string GetDate(string s, DateTools.Resolution resolution)
-        {
-            // TODO: Is this the correct way to parse the string?
-            DateTime d = DateTime.Parse(s, System.Globalization.CultureInfo.InvariantCulture);
-            return GetDate(d, resolution);
-
-            //// we use the default Locale since LuceneTestCase randomizes it
-            //DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.getDefault());
-            //return GetDate(df.Parse(s), resolution);      
-        }
-
-        /// <summary>for testing DateTools support</summary>
-        private string GetDate(DateTime d, DateTools.Resolution resolution)
-        {
-            return DateTools.DateToString(d, resolution);
-        }
-
-        private string GetLocalizedDate(int year, int month, int day)
-        {
-            // TODO: Is this the right way to get the localized date?
-            DateTime d = new DateTime(year, month, day, 23, 59, 59, 999);
-            return d.ToString();
-
-            //// we use the default Locale/TZ since LuceneTestCase randomizes it
-            //DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.getDefault());
-            //Calendar calendar = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
-            //calendar.clear();
-            //calendar.set(year, month, day);
-            //calendar.set(Calendar.HOUR_OF_DAY, 23);
-            //calendar.set(Calendar.MINUTE, 59);
-            //calendar.set(Calendar.SECOND, 59);
-            //calendar.set(Calendar.MILLISECOND, 999);
-            //return df.format(calendar.getTime());
-        }
-
-        // TODO: Fix this test
-        [Test]
-        public void TestDateRange()
-        {
-            Assert.Fail("Test is not implemented");
-
-        //    string startDate = GetLocalizedDate(2002, 1, 1);
-        //    string endDate = GetLocalizedDate(2002, 1, 4);
-        //    // we use the default Locale/TZ since LuceneTestCase randomizes it
-        //    Calendar endDateExpected = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
-        //    endDateExpected.clear();
-        //    endDateExpected.set(2002, 1, 4, 23, 59, 59);
-        //    endDateExpected.set(Calendar.MILLISECOND, 999);
-        //    string defaultField = "default";
-        //    string monthField = "month";
-        //    string hourField = "hour";
-        //    Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
-        //    CommonQueryParserConfiguration qp = GetParserConfig(a);
-
-        //    // set a field specific date resolution
-        //    SetDateResolution(qp, monthField, DateTools.Resolution.MONTH);
-
-        //    // set default date resolution to MILLISECOND
-        //    qp.SetDateResolution(DateTools.Resolution.MILLISECOND);
-
-        //    // set second field specific date resolution    
-        //    SetDateResolution(qp, hourField, DateTools.Resolution.HOUR);
-
-        //    // for this field no field specific date resolution has been set,
-        //    // so verify if the default resolution is used
-        //    AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
-        //            endDateExpected.getTime(), DateTools.Resolution.MILLISECOND);
-
-        //    // verify if field specific date resolutions are used for these two fields
-        //    AssertDateRangeQueryEquals(qp, monthField, startDate, endDate,
-        //            endDateExpected.getTime(), DateTools.Resolution.MONTH);
-
-        //    AssertDateRangeQueryEquals(qp, hourField, startDate, endDate,
-        //            endDateExpected.getTime(), DateTools.Resolution.HOUR);
-        }
-
-        public void AssertDateRangeQueryEquals(ICommonQueryParserConfiguration cqpC, string field, string startDate, string endDate,
-            DateTime endDateInclusive, DateTools.Resolution resolution)
-        {
-            AssertQueryEquals(cqpC, field, field + ":[" + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "]",
-                       "[" + GetDate(startDate, resolution) + " TO " + GetDate(endDateInclusive, resolution) + "]");
-            AssertQueryEquals(cqpC, field, field + ":{" + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "}",
-                       "{" + GetDate(startDate, resolution) + " TO " + GetDate(endDate, resolution) + "}");
-        }
-
-        [Test]
-        public void TestEscaped()
-        {
-            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
-
-            /*AssertQueryEquals("\\[brackets", a, "\\[brackets");
-            AssertQueryEquals("\\[brackets", null, "brackets");
-            AssertQueryEquals("\\\\", a, "\\\\");
-            AssertQueryEquals("\\+blah", a, "\\+blah");
-            AssertQueryEquals("\\(blah", a, "\\(blah");
-
-            AssertQueryEquals("\\-blah", a, "\\-blah");
-            AssertQueryEquals("\\!blah", a, "\\!blah");
-            AssertQueryEquals("\\{blah", a, "\\{blah");
-            AssertQueryEquals("\\}blah", a, "\\}blah");
-            AssertQueryEquals("\\:blah", a, "\\:blah");
-            AssertQueryEquals("\\^blah", a, "\\^blah");
-            AssertQueryEquals("\\[blah", a, "\\[blah");
-            AssertQueryEquals("\\]blah", a, "\\]blah");
-            AssertQueryEquals("\\\"blah", a, "\\\"blah");
-            AssertQueryEquals("\\(blah", a, "\\(blah");
-            AssertQueryEquals("\\)blah", a, "\\)blah");
-            AssertQueryEquals("\\~blah", a, "\\~blah");
-            AssertQueryEquals("\\*blah", a, "\\*blah");
-            AssertQueryEquals("\\?blah", a, "\\?blah");
-            //AssertQueryEquals("foo \\&\\& bar", a, "foo \\&\\& bar");
-            //AssertQueryEquals("foo \\|| bar", a, "foo \\|| bar");
-            //AssertQueryEquals("foo \\AND bar", a, "foo \\AND bar");*/
-
-            AssertQueryEquals("\\a", a, "a");
-
-            AssertQueryEquals("a\\-b:c", a, "a-b:c");
-            AssertQueryEquals("a\\+b:c", a, "a+b:c");
-            AssertQueryEquals("a\\:b:c", a, "a:b:c");
-            AssertQueryEquals("a\\\\b:c", a, "a\\b:c");
-
-            AssertQueryEquals("a:b\\-c", a, "a:b-c");
-            AssertQueryEquals("a:b\\+c", a, "a:b+c");
-            AssertQueryEquals("a:b\\:c", a, "a:b:c");
-            AssertQueryEquals("a:b\\\\c", a, "a:b\\c");
-
-            AssertQueryEquals("a:b\\-c*", a, "a:b-c*");
-            AssertQueryEquals("a:b\\+c*", a, "a:b+c*");
-            AssertQueryEquals("a:b\\:c*", a, "a:b:c*");
-
-            AssertQueryEquals("a:b\\\\c*", a, "a:b\\c*");
-
-            AssertQueryEquals("a:b\\-c~", a, "a:b-c~2");
-            AssertQueryEquals("a:b\\+c~", a, "a:b+c~2");
-            AssertQueryEquals("a:b\\:c~", a, "a:b:c~2");
-            AssertQueryEquals("a:b\\\\c~", a, "a:b\\c~2");
-
-            AssertQueryEquals("[ a\\- TO a\\+ ]", null, "[a- TO a+]");
-            AssertQueryEquals("[ a\\: TO a\\~ ]", null, "[a: TO a~]");
-            AssertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]");
-
-            AssertQueryEquals("[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]", a,
-                              "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]");
-
-            AssertQueryEquals("a\\\\\\+b", a, "a\\+b");
-
-            AssertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d");
-            AssertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\"");
-            AssertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\"");
-
-            AssertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt");
-
-            AssertParseException("XY\\"); // there must be a character after the escape char
-
-            // test unicode escaping
-            AssertQueryEquals("a\\u0062c", a, "abc");
-            AssertQueryEquals("XY\\u005a", a, "XYZ");
-            AssertQueryEquals("XY\\u005A", a, "XYZ");
-            AssertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\"");
-
-            AssertParseException("XY\\u005G");  // test non-hex character in escaped unicode sequence
-            AssertParseException("XY\\u005");   // test incomplete escaped unicode sequence
-
-            // Tests bug LUCENE-800
-            AssertQueryEquals("(item:\\\\ item:ABCD\\\\)", a, "item:\\ item:ABCD\\");
-            AssertParseException("(item:\\\\ item:ABCD\\\\))"); // unmatched closing paranthesis 
-            AssertQueryEquals("\\*", a, "*");
-            AssertQueryEquals("\\\\", a, "\\");  // escaped backslash
-
-            AssertParseException("\\"); // a backslash must always be escaped
-
-            // LUCENE-1189
-            AssertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b");
-        }
-
-        [Test]
-        public void TestEscapedVsQuestionMarkAsWildcard()
-        {
-            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
-            AssertQueryEquals("a:b\\-?c", a, "a:b\\-?c");
-            AssertQueryEquals("a:b\\+?c", a, "a:b\\+?c");
-            AssertQueryEquals("a:b\\:?c", a, "a:b\\:?c");
-
-            AssertQueryEquals("a:b\\\\?c", a, "a:b\\\\?c");
-        }
-
-        [Test]
-        public void TestQueryStringEscaping()
-        {
-            Analyzer a = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
-
-            AssertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
-            AssertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
-            AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
-            AssertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c");
-
-            AssertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c");
-            AssertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c");
-            AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
-            AssertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c");
-
-            AssertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*");
-            AssertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*");
-            AssertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*");
-
-            AssertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*");
-
-            AssertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c");
-            AssertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c");
-            AssertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c");
-
-            AssertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c");
-
-            AssertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~");
-            AssertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~");
-            AssertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~");
-            AssertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~");
-
-            AssertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]");
-            AssertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]");
-            AssertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]");
-
-            // LUCENE-881
-            AssertEscapedQueryEquals("|| abc ||", a, "\\|\\| abc \\|\\|");
-            AssertEscapedQueryEquals("&& abc &&", a, "\\&\\& abc \\&\\&");
-        }
-
-        [Test]
-        public void TestTabNewlineCarriageReturn()
-        {
-            AssertQueryEqualsDOA("+weltbank +worlbank", null,
-              "+weltbank +worlbank");
-
-            AssertQueryEqualsDOA("+weltbank\n+worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \n+worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \n +worlbank", null,
-              "+weltbank +worlbank");
-
-            AssertQueryEqualsDOA("+weltbank\r+worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \r+worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \r +worlbank", null,
-              "+weltbank +worlbank");
-
-            AssertQueryEqualsDOA("+weltbank\r\n+worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \r\n+worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \r\n +worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \r \n +worlbank", null,
-              "+weltbank +worlbank");
-
-            AssertQueryEqualsDOA("+weltbank\t+worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \t+worlbank", null,
-              "+weltbank +worlbank");
-            AssertQueryEqualsDOA("weltbank \t +worlbank", null,
-              "+weltbank +worlbank");
-        }
-
-        [Test]
-        public void TestSimpleDAO()
-        {
-            AssertQueryEqualsDOA("term term term", null, "+term +term +term");
-            AssertQueryEqualsDOA("term +term term", null, "+term +term +term");
-            AssertQueryEqualsDOA("term term +term", null, "+term +term +term");
-            AssertQueryEqualsDOA("term +term +term", null, "+term +term +term");
-            AssertQueryEqualsDOA("-term term term", null, "-term +term +term");
-        }
-
-        [Test]
-        public void TestBoost()
-        {
-            CharacterRunAutomaton stopWords = new CharacterRunAutomaton(BasicAutomata.MakeString("on"));
-            Analyzer oneStopAnalyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopWords);
-            ICommonQueryParserConfiguration qp = GetParserConfig(oneStopAnalyzer);
-            Query q = GetQuery("on^1.0", qp);
-            assertNotNull(q);
-            q = GetQuery("\"hello\"^2.0", qp);
-            assertNotNull(q);
-            assertEquals(q.Boost, (float)2.0, (float)0.5);
-            q = GetQuery("hello^2.0", qp);
-            assertNotNull(q);
-            assertEquals(q.Boost, (float)2.0, (float)0.5);
-            q = GetQuery("\"on\"^1.0", qp);
-            assertNotNull(q);
-
-            Analyzer a2 = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
-            ICommonQueryParserConfiguration qp2 = GetParserConfig(a2);
-            q = GetQuery("the^3", qp2);
-            // "the" is a stop word so the result is an empty query:
-            assertNotNull(q);
-            assertEquals("", q.toString());
-            assertEquals(1.0f, q.Boost, 0.01f);
-        }
-
-        public void AssertParseException(string queryString)
-        {
-            try
-            {
-                GetQuery(queryString);
-            }
-            catch (Exception expected)
-            {
-                if (IsQueryParserException(expected))
-                {
-                    return;
-                }
-            }
-            fail("ParseException expected, not thrown");
-        }
-
-        public void AssertParseException(string queryString, Analyzer a)
-        {
-            try
-            {
-                GetQuery(queryString, a);
-            }
-            catch (Exception expected)
-            {
-                if (IsQueryParserException(expected))
-                {
-                    return;
-                }
-            }
-            fail("ParseException expected, not thrown");
-        }
-
-        [Test]
-        public void TestException()
-        {
-            AssertParseException("\"some phrase");
-            AssertParseException("(foo bar");
-            AssertParseException("foo bar))");
-            AssertParseException("field:term:with:colon some more terms");
-            AssertParseException("(sub query)^5.0^2.0 plus more");
-            AssertParseException("secret AND illegal) AND access:confidential");
-        }
-
-        [Test]
-        public void TestBooleanQuery()
-        {
-            BooleanQuery.MaxClauseCount = (2);
-            Analyzer purWhitespaceAnalyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
-            AssertParseException("one two three", purWhitespaceAnalyzer);
-        }
-
-        [Test]
-        public void TestPrecedence()
-        {
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
-            Query query1 = GetQuery("A AND B OR C AND D", qp);
-            Query query2 = GetQuery("+A +B +C +D", qp);
-            assertEquals(query1, query2);
-        }
-
-        // LUCENETODO: convert this from DateField to DateUtil
-        //  public void testLocalDateFormat() throws IOException, ParseException {
-        //    Directory ramDir = newDirectory();
-        //    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
-        //    addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
-        //    addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
-        //    iw.close();
-        //    IndexSearcher is = new IndexSearcher(ramDir, true);
-        //    assertHits(1, "[12/1/2005 TO 12/3/2005]", is);
-        //    assertHits(2, "[12/1/2005 TO 12/4/2005]", is);
-        //    assertHits(1, "[12/3/2005 TO 12/4/2005]", is);
-        //    assertHits(1, "{12/1/2005 TO 12/3/2005}", is);
-        //    assertHits(1, "{12/1/2005 TO 12/4/2005}", is);
-        //    assertHits(0, "{12/3/2005 TO 12/4/2005}", is);
-        //    is.close();
-        //    ramDir.close();
-        //  }
-        //
-        //  private void addDateDoc(String content, int year, int month,
-        //                          int day, int hour, int minute, int second, IndexWriter iw) throws IOException {
-        //    Document d = new Document();
-        //    d.add(newField("f", content, Field.Store.YES, Field.Index.ANALYZED));
-        //    Calendar cal = Calendar.getInstance(Locale.ENGLISH);
-        //    cal.set(year, month - 1, day, hour, minute, second);
-        //    d.add(newField("date", DateField.dateToString(cal.getTime()), Field.Store.YES, Field.Index.NOT_ANALYZED));
-        //    iw.addDocument(d);
-        //  }
-
-        // Moved to AbstractQueryParserTestBase
-        public override void TestStarParsing()
-        {
-            throw new NotImplementedException();
-        }
-
-        [Test]
-        public void TestEscapedWildcard()
-        {
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
-            WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
-            assertEquals(q, GetQuery("foo\\?ba?r", qp));
-        }
-
-        [Test]
-        public void TestRegexps()
-        {
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
-            RegexpQuery q = new RegexpQuery(new Term("field", "[a-z][123]"));
-            assertEquals(q, GetQuery("/[a-z][123]/", qp));
-            qp.LowercaseExpandedTerms = (true);
-            assertEquals(q, GetQuery("/[A-Z][123]/", qp));
-            q.Boost = (0.5f);
-            assertEquals(q, GetQuery("/[A-Z][123]/^0.5", qp));
-            qp.MultiTermRewriteMethod=(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
-            q.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
-            assertTrue(GetQuery("/[A-Z][123]/^0.5", qp) is RegexpQuery);
-            assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((RegexpQuery)GetQuery("/[A-Z][123]/^0.5", qp)).GetRewriteMethod());
-            assertEquals(q, GetQuery("/[A-Z][123]/^0.5", qp));
-            qp.MultiTermRewriteMethod=(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT);
-
-            Query escaped = new RegexpQuery(new Term("field", "[a-z]\\/[123]"));
-            assertEquals(escaped, GetQuery("/[a-z]\\/[123]/", qp));
-            Query escaped2 = new RegexpQuery(new Term("field", "[a-z]\\*[123]"));
-            assertEquals(escaped2, GetQuery("/[a-z]\\*[123]/", qp));
-
-            BooleanQuery complex = new BooleanQuery();
-            complex.Add(new RegexpQuery(new Term("field", "[a-z]\\/[123]")), BooleanClause.Occur.MUST);
-            complex.Add(new TermQuery(new Term("path", "/etc/init.d/")), BooleanClause.Occur.MUST);
-            complex.Add(new TermQuery(new Term("field", "/etc/init[.]d/lucene/")), BooleanClause.Occur.SHOULD);
-            assertEquals(complex, GetQuery("/[a-z]\\/[123]/ AND path:\"/etc/init.d/\" OR \"/etc\\/init\\[.\\]d/lucene/\" ", qp));
-
-            Query re = new RegexpQuery(new Term("field", "http.*"));
-            assertEquals(re, GetQuery("field:/http.*/", qp));
-            assertEquals(re, GetQuery("/http.*/", qp));
-
-            re = new RegexpQuery(new Term("field", "http~0.5"));
-            assertEquals(re, GetQuery("field:/http~0.5/", qp));
-            assertEquals(re, GetQuery("/http~0.5/", qp));
-
-            re = new RegexpQuery(new Term("field", "boo"));
-            assertEquals(re, GetQuery("field:/boo/", qp));
-            assertEquals(re, GetQuery("/boo/", qp));
-
-            assertEquals(new TermQuery(new Term("field", "/boo/")), GetQuery("\"/boo/\"", qp));
-            assertEquals(new TermQuery(new Term("field", "/boo/")), GetQuery("\\/boo\\/", qp));
-
-            BooleanQuery two = new BooleanQuery();
-            two.Add(new RegexpQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
-            two.Add(new RegexpQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
-            assertEquals(two, GetQuery("field:/foo/ field:/bar/", qp));
-            assertEquals(two, GetQuery("/foo/ /bar/", qp));
-        }
-
-        [Test]
-        public void TestStopwords()
-        {
-            CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").ToAutomaton());
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopSet));
-            Query result = GetQuery("field:the OR field:foo", qp);
-            assertNotNull("result is null and it shouldn't be", result);
-            assertTrue("result is not a BooleanQuery", result is BooleanQuery);
-            assertTrue(((BooleanQuery)result).Clauses.Length + " does not equal: " + 0, ((BooleanQuery)result).Clauses.Length == 0);
-            result = GetQuery("field:woo OR field:the", qp);
-            assertNotNull("result is null and it shouldn't be", result);
-            assertTrue("result is not a TermQuery", result is TermQuery);
-            result = GetQuery("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", qp);
-            assertNotNull("result is null and it shouldn't be", result);
-            assertTrue("result is not a BooleanQuery", result is BooleanQuery);
-            if (VERBOSE) Console.WriteLine("Result: " + result);
-            assertTrue(((BooleanQuery)result).Clauses.Length + " does not equal: " + 2, ((BooleanQuery)result).Clauses.Length == 2);
-        }
-
-        [Test]
-        public void TestPositionIncrement()
-        {
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET));
-            qp.EnablePositionIncrements = (true);
-            String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
-            //               0         2                      5           7  8
-            int[] expectedPositions = { 1, 3, 4, 6, 9 };
-            PhraseQuery pq = (PhraseQuery)GetQuery(qtxt, qp);
-            //System.out.println("Query text: "+qtxt);
-            //System.out.println("Result: "+pq);
-            Term[] t = pq.Terms;
-            int[] pos = pq.Positions;
-            for (int i = 0; i < t.Length; i++)
-            {
-                //System.out.println(i+". "+t[i]+"  pos: "+pos[i]);
-                assertEquals("term " + i + " = " + t[i] + " has wrong term-position!", expectedPositions[i], pos[i]);
-            }
-        }
-
-        [Test]
-        public void TestMatchAllDocs()
-        {
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
-            assertEquals(new MatchAllDocsQuery(), GetQuery("*:*", qp));
-            assertEquals(new MatchAllDocsQuery(), GetQuery("(*:*)", qp));
-            BooleanQuery bq = (BooleanQuery)GetQuery("+*:* -*:*", qp);
-            assertTrue(bq.Clauses[0].Query is MatchAllDocsQuery);
-            assertTrue(bq.Clauses[1].Query is MatchAllDocsQuery);
-        }
-
-        private void AssertHits(int expected, String query, IndexSearcher @is)
-        {
-            string oldDefaultField = DefaultField;
-            DefaultField = "date";
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));
-            qp.Locale = System.Globalization.CultureInfo.GetCultureInfo("en");
-            Query q = GetQuery(query, qp);
-            ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs;
-            assertEquals(expected, hits.Length);
-            DefaultField = oldDefaultField;
-        }
-
-        public override void TearDown()
-        {
-            BooleanQuery.MaxClauseCount = originalMaxClauses;
-            base.TearDown();
-        }
-
-        // LUCENE-2002: make sure defaults for StandardAnalyzer's
-        // enableStopPositionIncr & QueryParser's enablePosIncr
-        // "match"
-        [Test]
-        public void TestPositionIncrements()
-        {
-            using (Directory dir = NewDirectory())
-            {
-                Analyzer a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
-                using (IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, a)))
-                {
-                    Document doc = new Document();
-                    doc.Add(NewTextField("field", "the wizard of ozzy", Field.Store.NO));
-                    w.AddDocument(doc);
-                    using (IndexReader r = DirectoryReader.Open(w, true))
-                    {
-                        IndexSearcher s = NewSearcher(r);
-
-                        Query q = GetQuery("\"wizard of ozzy\"", a);
-                        assertEquals(1, s.Search(q, 1).TotalHits);
-                    }
-                }
-            }
-        }
-
-        /// <summary>
-        /// adds synonym of "dog" for "dogs".
-        /// </summary>
-        protected class MockSynonymFilter : TokenFilter
-        {
-            ICharTermAttribute termAtt;
-            IPositionIncrementAttribute posIncAtt;
-            bool addSynonym = false;
-
-            public MockSynonymFilter(TokenStream input)
-                : base(input)
-            {
-                termAtt = AddAttribute<ICharTermAttribute>();
-                posIncAtt = AddAttribute<IPositionIncrementAttribute>();
-            }
-
-            public override sealed bool IncrementToken()
-            {
-                if (addSynonym)
-                { // inject our synonym
-                    ClearAttributes();
-                    termAtt.SetEmpty().Append("dog");
-                    posIncAtt.PositionIncrement = (0);
-                    addSynonym = false;
-                    return true;
-                }
-
-                if (input.IncrementToken())
-                {
-                    addSynonym = termAtt.toString().equals("dogs");
-                    return true;
-                }
-                else
-                {
-                    return false;
-                }
-            }
-        }
-
-        /// <summary>
-        /// whitespace+lowercase analyzer with synonyms
-        /// </summary>
-        protected class Analyzer1 : Analyzer
-        {
-            public Analyzer1()
-            { }
-
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
-                return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
-            }
-        }
-
-        /// <summary>
-        /// whitespace+lowercase analyzer without synonyms
-        /// </summary>
-        protected class Analyzer2 : Analyzer
-        {
-            public Analyzer2()
-            { }
-
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true));
-            }
-        }
-
-        // Moved to AbstractQueryParserTestBase
-        public override void TestNewFieldQuery()
-        {
-            throw new NotImplementedException();
-        }
-
-        /// <summary>
-        /// Mock collation analyzer: indexes terms as "collated" + term
-        /// </summary>
-        private class MockCollationFilter : TokenFilter
-        {
-            private ICharTermAttribute termAtt;
-
-            public MockCollationFilter(TokenStream input)
-                : base(input)
-            {
-                termAtt = AddAttribute<ICharTermAttribute>();
-            }
-
-            public override sealed bool IncrementToken()
-            {
-                if (input.IncrementToken())
-                {
-                    string term = termAtt.toString();
-                    termAtt.SetEmpty().Append("collated").Append(term);
-                    return true;
-                }
-                else
-                {
-                    return false;
-                }
-            }
-        }
-
-        private class MockCollationAnalyzer : Analyzer
-        {
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
-                return new TokenStreamComponents(tokenizer, new MockCollationFilter(tokenizer));
-            }
-        }
-
-        [Test]
-        public void TestCollatedRange()
-        {
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockCollationAnalyzer());
-            SetAnalyzeRangeTerms(qp, true);
-            Query expected = TermRangeQuery.NewStringRange(DefaultField, "collatedabc", "collateddef", true, true);
-            Query actual = GetQuery("[abc TO def]", qp);
-            assertEquals(expected, actual);
-        }
-
-        [Test]
-        public void TestDistanceAsEditsParsing()
-        {
-            FuzzyQuery q = (FuzzyQuery)GetQuery("foobar~2", new MockAnalyzer(Random()));
-            assertEquals(2, q.MaxEdits);
-        }
-
-        [Test]
-        public void TestPhraseQueryToString()
-        {
-            Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
-            ICommonQueryParserConfiguration qp = GetParserConfig(analyzer);
-            qp.EnablePositionIncrements = (true);
-            PhraseQuery q = (PhraseQuery)GetQuery("\"this hi this is a test is\"", qp);
-            assertEquals("field:\"? hi ? ? ? test\"", q.toString());
-        }
-
-        [Test]
-        public void TestParseWildcardAndPhraseQueries()
-        {
-            string field = "content";
-            string oldDefaultField = DefaultField;
-            DefaultField = (field);
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random()));
-            qp.AllowLeadingWildcard=(true);
-
-            string[][] prefixQueries = new string[3][] {
-                new string[] {"a*", "ab*", "abc*",},
-                new string[] {"h*", "hi*", "hij*", "\\\\7*"},
-                new string[] {"o*", "op*", "opq*", "\\\\\\\\*"},
-            };
-
-            string[][] wildcardQueries = new string[3][] {
-                new string[] {"*a*", "*ab*", "*abc**", "ab*e*", "*g?", "*f?1", "abc**"},
-                new string[] {"*h*", "*hi*", "*hij**", "hi*k*", "*n?", "*m?1", "hij**"},
-                new string[] {"*o*", "*op*", "*opq**", "op*q*", "*u?", "*t?1", "opq**"},
-            };
-
-            // test queries that must be prefix queries
-            for (int i = 0; i < prefixQueries.Length; i++)
-            {
-                for (int j = 0; j < prefixQueries[i].Length; j++)
-                {
-                    string queryString = prefixQueries[i][j];
-                    Query q = GetQuery(queryString, qp);
-                    assertEquals(typeof(PrefixQuery), q.GetType());
-                }
-            }
-
-            // test queries that must be wildcard queries
-            for (int i = 0; i < wildcardQueries.Length; i++)
-            {
-                for (int j = 0; j < wildcardQueries[i].Length; j++)
-                {
-                    string qtxt = wildcardQueries[i][j];
-                    Query q = GetQuery(qtxt, qp);
-                    assertEquals(typeof(WildcardQuery), q.GetType());
-                }
-            }
-            DefaultField = (oldDefaultField);
-        }
-
-        [Test]
-        public void TestPhraseQueryPositionIncrements()
-        {
-            CharacterRunAutomaton stopStopList =
-            new CharacterRunAutomaton(new RegExp("[sS][tT][oO][pP]").ToAutomaton());
-
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false, stopStopList));
-
-            qp = GetParserConfig(
-                                 new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false, stopStopList));
-            qp.EnablePositionIncrements=(true);
-
-            PhraseQuery phraseQuery = new PhraseQuery();
-            phraseQuery.Add(new Term("field", "1"));
-            phraseQuery.Add(new Term("field", "2"), 2);
-            assertEquals(phraseQuery, GetQuery("\"1 stop 2\"", qp));
-        }
-
-        [Test]
-        public void TestMatchAllQueryParsing()
-        {
-            // test simple parsing of MatchAllDocsQuery
-            string oldDefaultField = DefaultField;
-            DefaultField = ("key");
-            ICommonQueryParserConfiguration qp = GetParserConfig(new MockAnalyzer(Random()));
-            assertEquals(new MatchAllDocsQuery(), GetQuery(new MatchAllDocsQuery().toString(), qp));
-
-            // test parsing with non-default boost
-            MatchAllDocsQuery query = new MatchAllDocsQuery();
-            query.Boost = (2.3f);
-            assertEquals(query, GetQuery(query.toString(), qp));
-            DefaultField = (oldDefaultField);
-        }
-
-        [Test]
-        public void TestNestedAndClausesFoo()
-        {
-            string query = "(field1:[1 TO *] AND field1:[* TO 2]) AND field2:(z)";
-            BooleanQuery q = new BooleanQuery();
-            BooleanQuery bq = new BooleanQuery();
-            bq.Add(TermRangeQuery.NewStringRange("field1", "1", null, true, true), BooleanClause.Occur.MUST);
-            bq.Add(TermRangeQuery.NewStringRange("field1", null, "2", true, true), BooleanClause.Occur.MUST);
-            q.Add(bq, BooleanClause.Occur.MUST);
-            q.Add(new TermQuery(new Term("field2", "z")), BooleanClause.Occur.MUST);
-            assertEquals(q, GetQuery(query, new MockAnalyzer(Random())));
-        }
-    }
-
-
-    /// <summary>
-    /// This class was added in .NET because the Visual Studio test runner
-    /// does not detect tests in abstract classes. Therefore, the abstract members
-    /// of QueryParserTestBase were moved here so the QueryParserTestBase class
-    /// could be made concrete.
-    /// </summary>
-    public abstract class AbstractQueryParserTestBase : LuceneTestCase
-    {
-        public abstract void TestStarParsing();
-
-        public abstract void TestNewFieldQuery();
-
-        public abstract void TestDefaultOperator();
-
-        public abstract ICommonQueryParserConfiguration GetParserConfig(Analyzer a);
-
-        public abstract void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC);
-
-        public abstract void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC);
-
-        public abstract void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value);
-
-        public abstract void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value);
-
-        public abstract void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value);
-
-        public abstract Query GetQuery(string query, ICommonQueryParserConfiguration cqpC);
-
-        public abstract Query GetQuery(string query, Analyzer a);
-
-        public abstract bool IsQueryParserException(Exception exception);
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/packages.config
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/packages.config b/Lucene.Net.Tests.QueryParser/packages.config
deleted file mode 100644
index 139d513..0000000
--- a/Lucene.Net.Tests.QueryParser/packages.config
+++ /dev/null
@@ -1,4 +0,0 @@
-\ufeff<?xml version="1.0" encoding="utf-8"?>
-<packages>
-  <package id="NUnit" version="2.6.3" targetFramework="net451" />
-</packages>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.sln
----------------------------------------------------------------------
diff --git a/Lucene.Net.sln b/Lucene.Net.sln
index c6031b4..cf88bce 100644
--- a/Lucene.Net.sln
+++ b/Lucene.Net.sln
@@ -46,9 +46,9 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.Analysis.C
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.Codecs", "src\Lucene.Net.Tests.Codecs\Lucene.Net.Tests.Codecs.csproj", "{351B75B1-BBD5-4E32-8036-7BED4E0135A6}"
 EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.QueryParser", "Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj", "{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}"
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.QueryParser", "src\Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj", "{949BA34B-6AE6-4CE3-B578-61E13E4D76BF}"
 EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.QueryParser", "Lucene.Net.Tests.QueryParser\Lucene.Net.Tests.QueryParser.csproj", "{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}"
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Lucene.Net.Tests.QueryParser", "src\Lucene.Net.Tests.QueryParser\Lucene.Net.Tests.QueryParser.csproj", "{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}"
 EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
new file mode 100644
index 0000000..8930aa4
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
@@ -0,0 +1,198 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using System.Text;
+using System.Text.RegularExpressions;
+
+namespace Lucene.Net.QueryParser.Analyzing
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Overrides Lucene's default QueryParser so that Fuzzy-, Prefix-, Range-, and WildcardQuerys
+    /// are also passed through the given analyzer, but wildcard characters <code>*</code> and
+    /// <code>?</code> don't get removed from the search terms.
+    /// 
+    /// <p><b>Warning:</b> This class should only be used with analyzers that do not use stopwords
+    /// or that add tokens. Also, several stemming analyzers are inappropriate: for example, GermanAnalyzer 
+    /// will turn <code>H&auml;user</code> into <code>hau</code>, but <code>H?user</code> will 
+    /// become <code>h?user</code> when using this parser and thus no match would be found (i.e.
+    /// using this parser will be no improvement over QueryParser in such cases). 
+    /// </summary>
+    public class AnalyzingQueryParser : Classic.QueryParser
+    {
+        // gobble escaped chars or find a wildcard character 
+        private readonly Regex wildcardPattern = new Regex(@"(\\.)|([?*]+)", RegexOptions.Compiled);
+
+        public AnalyzingQueryParser(LuceneVersion matchVersion, string field, Analyzer analyzer)
+            : base(matchVersion, field, analyzer)
+        {
+            AnalyzeRangeTerms = true;
+        }
+
+        /// <summary>
+        /// Called when parser parses an input term
+        /// that uses prefix notation; that is, contains a single '*' wildcard
+        /// character as its last character. Since this is a special case
+        /// of generic wildcard term, and such a query can be optimized easily,
+        /// this usually results in a different query object.
+        /// <p>
+        /// Depending on analyzer and settings, a prefix term may (most probably will)
+        /// be lower-cased automatically. It <b>will</b> go through the default Analyzer.
+        /// <p>
+        /// Overrides super class, by passing terms through analyzer.
+        /// </summary>
+        /// <param name="field">Name of the field query will use.</param>
+        /// <param name="termStr">Term to use for building term for the query
+        /// (<b>without</b> trailing '*' character!)</param>
+        /// <returns>Resulting <see cref="Query"/> built for the term</returns>
+        protected internal override Query GetWildcardQuery(string field, string termStr)
+        {
+            if (termStr == null)
+            {
+                //can't imagine this would ever happen
+                throw new ParseException("Passed null value as term to getWildcardQuery");
+            }
+            if (!AllowLeadingWildcard && (termStr.StartsWith("*") || termStr.StartsWith("?")))
+            {
+                throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery"
+                                        + " unless getAllowLeadingWildcard() returns true");
+            }
+
+            Match wildcardMatcher = wildcardPattern.Match(termStr);
+            StringBuilder sb = new StringBuilder();
+            int last = 0;
+
+            while (wildcardMatcher.Success)
+            {
+                // continue if escaped char
+                if (wildcardMatcher.Groups[1].Success)
+                {
+                    wildcardMatcher = wildcardMatcher.NextMatch();
+                    continue;
+                }
+
+                if (wildcardMatcher.Index > last)
+                {
+                    string chunk = termStr.Substring(last, wildcardMatcher.Index - last);
+                    string analyzed = AnalyzeSingleChunk(field, termStr, chunk);
+                    sb.Append(analyzed);
+                }
+
+                //append the wildcard character
+                sb.Append(wildcardMatcher.Groups[2]);
+
+                last = wildcardMatcher.Index + wildcardMatcher.Length;
+                wildcardMatcher = wildcardMatcher.NextMatch();
+            }
+            if (last < termStr.Length)
+            {
+                sb.Append(AnalyzeSingleChunk(field, termStr, termStr.Substring(last)));
+            }
+            return base.GetWildcardQuery(field, sb.ToString());
+        }
+
+        /// <summary>
+        /// Called when parser parses an input term that has the fuzzy suffix (~) appended.
+        /// <p>
+        /// Depending on analyzer and settings, a fuzzy term may (most probably will)
+        /// be lower-cased automatically. It <b>will</b> go through the default Analyzer.
+        /// <p>
+        /// Overrides super class, by passing terms through analyzer.
+        /// </summary>
+        /// <param name="field">Name of the field query will use.</param>
+        /// <param name="termStr">Term to use for building term for the query</param>
+        /// <param name="minSimilarity">Minimum similarity threshold for the fuzzy match</param>
+        /// <returns>Resulting <see cref="Query"/> built for the term</returns>
+        protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+        {
+            string analyzed = AnalyzeSingleChunk(field, termStr, termStr);
+            return base.GetFuzzyQuery(field, analyzed, minSimilarity);
+        }
+
+        /// <summary>
+        /// Returns the analyzed form for the given chunk
+        /// 
+        /// If the analyzer produces more than one output token from the given chunk,
+        /// a ParseException is thrown.
+        /// </summary>
+        /// <param name="field">The target field</param>
+        /// <param name="termStr">The full term from which the given chunk is excerpted</param>
+        /// <param name="chunk">The portion of the given termStr to be analyzed</param>
+        /// <returns>The result of analyzing the given chunk</returns>
+        /// <exception cref="ParseException">ParseException when analysis returns other than one output token</exception>
+        protected internal string AnalyzeSingleChunk(string field, string termStr, string chunk)
+        {
+            string analyzed = null;
+            TokenStream stream = null;
+            try
+            {
+                stream = Analyzer.TokenStream(field, chunk);
+                stream.Reset();
+                ICharTermAttribute termAtt = stream.GetAttribute<ICharTermAttribute>();
+                // get first and hopefully only output token
+                if (stream.IncrementToken())
+                {
+                    analyzed = termAtt.ToString();
+
+                    // try to increment again, there should only be one output token
+                    StringBuilder multipleOutputs = null;
+                    while (stream.IncrementToken())
+                    {
+                        if (null == multipleOutputs)
+                        {
+                            multipleOutputs = new StringBuilder();
+                            multipleOutputs.Append('"');
+                            multipleOutputs.Append(analyzed);
+                            multipleOutputs.Append('"');
+                        }
+                        multipleOutputs.Append(',');
+                        multipleOutputs.Append('"');
+                        multipleOutputs.Append(termAtt.ToString());
+                        multipleOutputs.Append('"');
+                    }
+                    stream.End();
+                    if (null != multipleOutputs)
+                    {
+                        throw new ParseException(
+                            string.Format(Locale, "Analyzer created multiple terms for \"%s\": %s", chunk, multipleOutputs.ToString()));
+                    }
+                }
+                else
+                {
+                    // nothing returned by analyzer.  Was it a stop word and the user accidentally
+                    // used an analyzer with stop words?
+                    stream.End();
+                    throw new ParseException(string.Format(Locale, "Analyzer returned nothing for \"%s\"", chunk));
+                }
+            }
+            catch (System.IO.IOException e)
+            {
+                throw new ParseException(
+                    string.Format(Locale, "IO error while trying to analyze single term: \"%s\"", termStr));
+            }
+            finally
+            {
+                IOUtils.CloseWhileHandlingException(stream);
+            }
+            return analyzed;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/CharStream.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/CharStream.cs b/src/Lucene.Net.QueryParser/Classic/CharStream.cs
new file mode 100644
index 0000000..8d0fc3a
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/CharStream.cs
@@ -0,0 +1,134 @@
+using System;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+	
+	/// <summary> 
+    /// This interface describes a character stream that maintains line and
+	/// column number positions of the characters.  It also has the capability
+	/// to backup the stream to some extent.  An implementation of this
+	/// interface is used in the TokenManager implementation generated by
+	/// JavaCCParser.
+	/// 
+	/// All the methods except backup can be implemented in any fashion. backup
+	/// needs to be implemented correctly for the correct operation of the lexer.
+	/// Rest of the methods are all used to get information like line number,
+	/// column number and the String that constitutes a token and are not used
+	/// by the lexer. Hence their implementation won't affect the generated lexer's
+	/// operation.
+	/// </summary>
+	public interface ICharStream
+	{
+		/// <summary> 
+        /// Returns the next character from the selected input.  The method
+		/// of selecting the input is the responsibility of the class
+		/// implementing this interface.  Can throw any System.IO.IOException.
+		/// </summary>
+		char ReadChar();
+
+	    /// <summary>
+        /// Returns the column position of the character last read.
+        /// </summary>
+	    /// <deprecated>
+	    /// </deprecated>
+	    /// <seealso cref="EndColumn">
+	    /// </seealso>
+	    [Obsolete]
+	    int Column { get; }
+
+	    /// <summary>
+        /// Returns the line number of the character last read.
+        /// </summary>
+	    /// <deprecated>
+	    /// </deprecated>
+	    /// <seealso cref="EndLine">
+	    /// </seealso>
+	    [Obsolete]
+	    int Line { get; }
+
+	    /// <summary>
+        /// Returns the column number of the last character for current token (being
+	    /// matched after the last call to BeginToken).
+	    /// </summary>
+	    int EndColumn { get; }
+
+	    /// <summary> 
+        /// Returns the line number of the last character for current token (being
+	    /// matched after the last call to BeginToken).
+	    /// </summary>
+	    int EndLine { get; }
+
+	    /// <summary> 
+        /// Returns the column number of the first character for current token (being
+	    /// matched after the last call to BeginToken).
+	    /// </summary>
+	    int BeginColumn { get; }
+
+	    /// <summary> 
+        /// Returns the line number of the first character for current token (being
+	    /// matched after the last call to BeginToken).
+	    /// </summary>
+	    int BeginLine { get; }
+
+	    /// <summary> 
+        /// Backs up the input stream by amount steps. Lexer calls this method if it
+		/// had already read some characters, but could not use them to match a
+		/// (longer) token. So, they will be used again as the prefix of the next
+		/// token and it is the implementation's responsibility to do this right.
+		/// </summary>
+		void  Backup(int amount);
+		
+		/// <summary> 
+        /// Returns the next character that marks the beginning of the next token.
+		/// All characters must remain in the buffer between two successive calls
+		/// to this method to implement backup correctly.
+		/// </summary>
+		char BeginToken();
+
+	    /// <summary> 
+        /// Returns a string made up of characters from the marked token beginning
+	    /// to the current buffer position. Implementations have the choice of returning
+	    /// anything that they want to. For example, for efficiency, one might decide
+	    /// to just return null, which is a valid implementation.
+	    /// </summary>
+	    string Image { get; }
+
+	    /// <summary> 
+        /// Returns an array of characters that make up the suffix of length 'len' for
+		/// the currently matched token. This is used to build up the matched string
+		/// for use in actions in the case of MORE. A simple and inefficient
+		/// implementation of this is as follows :
+		/// 
+		/// {
+		/// String t = GetImage();
+		/// return t.substring(t.length() - len, t.length()).toCharArray();
+		/// }
+		/// </summary>
+		char[] GetSuffix(int len);
+		
+		/// <summary> 
+        /// The lexer calls this function to indicate that it is done with the stream
+		/// and hence implementations can free any resources held by this class.
+		/// Again, the body of this function can be just empty and it will not
+		/// affect the lexer's operation.
+		/// </summary>
+		void  Done();
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/FastCharStream.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/FastCharStream.cs b/src/Lucene.Net.QueryParser/Classic/FastCharStream.cs
new file mode 100644
index 0000000..b4da568
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/FastCharStream.cs
@@ -0,0 +1,158 @@
+using System;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+	
+	/// <summary>
+    /// An efficient implementation of JavaCC's CharStream interface.  <p/>Note that
+	/// this does not do line-number counting, but instead keeps track of the
+	/// character position of the token in the input, as required by Lucene's <see cref="Lucene.Net.Analysis.Token" />
+	/// API.
+	/// </summary>
+	public sealed class FastCharStream : ICharStream
+	{
+		internal char[] buffer = null;
+		
+		internal int bufferLength = 0; // end of valid chars
+		internal int bufferPosition = 0; // next char to read
+		
+		internal int tokenStart = 0; // offset in buffer
+		internal int bufferStart = 0; // position in file of buffer
+		
+		internal System.IO.TextReader input; // source of chars
+		
+		/// <summary>
+        /// Constructs from a Reader. 
+        /// </summary>
+		public FastCharStream(System.IO.TextReader r)
+		{
+			input = r;
+		}
+		
+		public char ReadChar()
+		{
+			if (bufferPosition >= bufferLength)
+				Refill();
+			return buffer[bufferPosition++];
+		}
+		
+		private void  Refill()
+		{
+			int newPosition = bufferLength - tokenStart;
+			
+			if (tokenStart == 0)
+			{
+				// token won't fit in buffer
+				if (buffer == null)
+				{
+					// first time: alloc buffer
+					buffer = new char[2048];
+				}
+				else if (bufferLength == buffer.Length)
+				{
+					// grow buffer
+					char[] newBuffer = new char[buffer.Length * 2];
+					Array.Copy(buffer, 0, newBuffer, 0, bufferLength);
+					buffer = newBuffer;
+				}
+			}
+			else
+			{
+				// shift token to front
+				Array.Copy(buffer, tokenStart, buffer, 0, newPosition);
+			}
+			
+			bufferLength = newPosition; // update state
+			bufferPosition = newPosition;
+			bufferStart += tokenStart;
+			tokenStart = 0;
+			
+			int charsRead = input.Read(buffer, newPosition, buffer.Length - newPosition);
+			if (charsRead <= 0)
+				throw new System.IO.IOException("read past eof");
+			else
+				bufferLength += charsRead;
+		}
+		
+		public char BeginToken()
+		{
+			tokenStart = bufferPosition;
+			return ReadChar();
+		}
+		
+		public void  Backup(int amount)
+		{
+			bufferPosition -= amount;
+		}
+
+	    public string Image
+	    {
+	        get { return new System.String(buffer, tokenStart, bufferPosition - tokenStart); }
+	    }
+
+	    public char[] GetSuffix(int len)
+		{
+			char[] value_Renamed = new char[len];
+			Array.Copy(buffer, bufferPosition - len, value_Renamed, 0, len);
+			return value_Renamed;
+		}
+		
+		public void Done()
+		{
+			try
+			{
+				input.Close();
+			}
+			catch (System.IO.IOException e)
+			{
+				System.Console.Error.WriteLine("Caught: " + e + "; ignoring.");
+			}
+		}
+
+	    public int Column
+	    {
+	        get { return bufferStart + bufferPosition; }
+	    }
+
+	    public int Line
+	    {
+	        get { return 1; }
+	    }
+
+	    public int EndColumn
+	    {
+	        get { return bufferStart + bufferPosition; }
+	    }
+
+	    public int EndLine
+	    {
+	        get { return 1; }
+	    }
+
+	    public int BeginColumn
+	    {
+	        get { return bufferStart + tokenStart; }
+	    }
+
+	    public int BeginLine
+	    {
+	        get { return 1; }
+	    }
+	}
+}
\ No newline at end of file


[36/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/679ad24c
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/679ad24c
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/679ad24c

Branch: refs/heads/master
Commit: 679ad24cc41f0a91931edfca372b1ed688cbdee6
Parents: 0f10c9f
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Tue Aug 2 19:32:37 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:50 2016 +0700

----------------------------------------------------------------------
 .../Analyzing/AnalyzingQueryParser.cs           |  198 ---
 Lucene.Net.QueryParser/Classic/CharStream.cs    |  134 --
 .../Classic/FastCharStream.cs                   |  158 --
 .../Classic/MultiFieldQueryParser.cs            |  404 -----
 .../Classic/ParseException.cs                   |  235 ---
 Lucene.Net.QueryParser/Classic/QueryParser.cs   |  921 -----------
 .../Classic/QueryParserBase.cs                  | 1025 ------------
 .../Classic/QueryParserConstants.cs             |  224 ---
 .../Classic/QueryParserTokenManager.cs          | 1356 ----------------
 Lucene.Net.QueryParser/Classic/Token.cs         |  142 --
 Lucene.Net.QueryParser/Classic/TokenMgrError.cs |  170 --
 .../ComplexPhrase/ComplexPhraseQueryParser.cs   |  468 ------
 .../Ext/ExtendableQueryParser.cs                |  131 --
 Lucene.Net.QueryParser/Ext/ExtensionQuery.cs    |   54 -
 Lucene.Net.QueryParser/Ext/Extensions.cs        |  167 --
 Lucene.Net.QueryParser/Ext/ParserExtension.cs   |   50 -
 .../Standard/CommonQueryParserConfiguration.cs  |  106 --
 .../Lucene.Net.QueryParser.csproj               |  107 --
 .../Properties/AssemblyInfo.cs                  |   39 -
 .../Simple/SimpleQueryParser.cs                 |  788 ---------
 .../Surround/Parser/CharStream.cs               |  134 --
 .../Surround/Parser/FastCharStream.cs           |  158 --
 .../Surround/Parser/ParseException.cs           |  234 ---
 .../Surround/Parser/QueryParser.cs              |  912 -----------
 .../Surround/Parser/QueryParserConstants.cs     |  120 --
 .../Surround/Parser/QueryParserTokenManager.cs  |  760 ---------
 Lucene.Net.QueryParser/Surround/Parser/Token.cs |  142 --
 .../Surround/Parser/TokenMgrError.cs            |  170 --
 .../Surround/Query/AndQuery.cs                  |   39 -
 .../Surround/Query/BasicQueryFactory.cs         |  110 --
 .../Surround/Query/ComposedQuery.cs             |  144 --
 .../Surround/Query/DistanceQuery.cs             |  117 --
 .../Surround/Query/DistanceRewriteQuery.cs      |   35 -
 .../Surround/Query/DistanceSubQuery.cs          |   36 -
 .../Surround/Query/FieldsQuery.cs               |  105 --
 .../Surround/Query/NotQuery.cs                  |   48 -
 .../Surround/Query/OrQuery.cs                   |   71 -
 .../Surround/Query/RewriteQuery.cs              |   85 -
 .../Surround/Query/SimpleTerm.cs                |  118 --
 .../Surround/Query/SimpleTermRewriteQuery.cs    |   64 -
 .../Surround/Query/SpanNearClauseFactory.cs     |   93 --
 .../Surround/Query/SrndBooleanQuery.cs          |   51 -
 .../Surround/Query/SrndPrefixQuery.cs           |  108 --
 .../Surround/Query/SrndQuery.cs                 |  149 --
 .../Surround/Query/SrndTermQuery.cs             |   63 -
 .../Surround/Query/SrndTruncQuery.cs            |  139 --
 .../Surround/Query/TooManyBasicQueries.cs       |   30 -
 .../Analyzing/TestAnalyzingQueryParser.cs       |  341 ----
 .../Classic/TestMultiAnalyzer.cs                |  278 ----
 .../Classic/TestMultiFieldQueryParser.cs        |  376 -----
 .../Classic/TestMultiPhraseQueryParsing.cs      |  121 --
 .../Classic/TestQueryParser.cs                  |  564 -------
 .../ComplexPhrase/TestComplexPhraseQuery.cs     |  214 ---
 .../Ext/ExtensionStub.cs                        |   30 -
 .../Ext/TestExtendableQueryParser.cs            |  145 --
 .../Ext/TestExtensions.cs                       |   97 --
 .../Lucene.Net.Tests.QueryParser.csproj         |   95 --
 .../Properties/AssemblyInfo.cs                  |   36 -
 .../Simple/TestSimpleQueryParser.cs             |  728 ---------
 .../Surround/Query/BooleanQueryTst.cs           |  142 --
 .../Surround/Query/ExceptionQueryTst.cs         |   76 -
 .../Surround/Query/SingleFieldTestDb.cs         |   55 -
 .../Surround/Query/SrndQueryTest.cs             |   48 -
 .../Surround/Query/Test01Exceptions.cs          |   72 -
 .../Surround/Query/Test02Boolean.cs             |  178 --
 .../Surround/Query/Test03Distance.cs            |  341 ----
 .../Util/QueryParserTestBase.cs                 | 1523 ------------------
 Lucene.Net.Tests.QueryParser/packages.config    |    4 -
 Lucene.Net.sln                                  |    4 +-
 .../Analyzing/AnalyzingQueryParser.cs           |  198 +++
 .../Classic/CharStream.cs                       |  134 ++
 .../Classic/FastCharStream.cs                   |  158 ++
 .../Classic/MultiFieldQueryParser.cs            |  404 +++++
 .../Classic/ParseException.cs                   |  235 +++
 .../Classic/QueryParser.cs                      |  921 +++++++++++
 .../Classic/QueryParserBase.cs                  | 1025 ++++++++++++
 .../Classic/QueryParserConstants.cs             |  224 +++
 .../Classic/QueryParserTokenManager.cs          | 1356 ++++++++++++++++
 src/Lucene.Net.QueryParser/Classic/Token.cs     |  142 ++
 .../Classic/TokenMgrError.cs                    |  170 ++
 .../ComplexPhrase/ComplexPhraseQueryParser.cs   |  468 ++++++
 .../Ext/ExtendableQueryParser.cs                |  131 ++
 .../Ext/ExtensionQuery.cs                       |   54 +
 src/Lucene.Net.QueryParser/Ext/Extensions.cs    |  167 ++
 .../Ext/ParserExtension.cs                      |   50 +
 .../Standard/CommonQueryParserConfiguration.cs  |  106 ++
 .../Lucene.Net.QueryParser.csproj               |  107 ++
 .../Properties/AssemblyInfo.cs                  |   39 +
 .../Simple/SimpleQueryParser.cs                 |  788 +++++++++
 .../Surround/Parser/CharStream.cs               |  134 ++
 .../Surround/Parser/FastCharStream.cs           |  158 ++
 .../Surround/Parser/ParseException.cs           |  234 +++
 .../Surround/Parser/QueryParser.cs              |  912 +++++++++++
 .../Surround/Parser/QueryParserConstants.cs     |  120 ++
 .../Surround/Parser/QueryParserTokenManager.cs  |  760 +++++++++
 .../Surround/Parser/Token.cs                    |  142 ++
 .../Surround/Parser/TokenMgrError.cs            |  170 ++
 .../Surround/Query/AndQuery.cs                  |   39 +
 .../Surround/Query/BasicQueryFactory.cs         |  110 ++
 .../Surround/Query/ComposedQuery.cs             |  144 ++
 .../Surround/Query/DistanceQuery.cs             |  117 ++
 .../Surround/Query/DistanceRewriteQuery.cs      |   35 +
 .../Surround/Query/DistanceSubQuery.cs          |   36 +
 .../Surround/Query/FieldsQuery.cs               |  105 ++
 .../Surround/Query/NotQuery.cs                  |   48 +
 .../Surround/Query/OrQuery.cs                   |   71 +
 .../Surround/Query/RewriteQuery.cs              |   85 +
 .../Surround/Query/SimpleTerm.cs                |  118 ++
 .../Surround/Query/SimpleTermRewriteQuery.cs    |   64 +
 .../Surround/Query/SpanNearClauseFactory.cs     |   93 ++
 .../Surround/Query/SrndBooleanQuery.cs          |   51 +
 .../Surround/Query/SrndPrefixQuery.cs           |  108 ++
 .../Surround/Query/SrndQuery.cs                 |  149 ++
 .../Surround/Query/SrndTermQuery.cs             |   63 +
 .../Surround/Query/SrndTruncQuery.cs            |  139 ++
 .../Surround/Query/TooManyBasicQueries.cs       |   30 +
 .../Analyzing/TestAnalyzingQueryParser.cs       |  341 ++++
 .../Classic/TestMultiAnalyzer.cs                |  278 ++++
 .../Classic/TestMultiFieldQueryParser.cs        |  376 +++++
 .../Classic/TestMultiPhraseQueryParsing.cs      |  121 ++
 .../Classic/TestQueryParser.cs                  |  564 +++++++
 .../ComplexPhrase/TestComplexPhraseQuery.cs     |  214 +++
 .../Ext/ExtensionStub.cs                        |   30 +
 .../Ext/TestExtendableQueryParser.cs            |  145 ++
 .../Ext/TestExtensions.cs                       |   97 ++
 .../Lucene.Net.Tests.QueryParser.csproj         |   94 ++
 .../Properties/AssemblyInfo.cs                  |   36 +
 .../Simple/TestSimpleQueryParser.cs             |  728 +++++++++
 .../Surround/Query/BooleanQueryTst.cs           |  142 ++
 .../Surround/Query/ExceptionQueryTst.cs         |   76 +
 .../Surround/Query/SingleFieldTestDb.cs         |   55 +
 .../Surround/Query/SrndQueryTest.cs             |   48 +
 .../Surround/Query/Test01Exceptions.cs          |   72 +
 .../Surround/Query/Test02Boolean.cs             |  178 ++
 .../Surround/Query/Test03Distance.cs            |  341 ++++
 .../Util/QueryParserTestBase.cs                 | 1523 ++++++++++++++++++
 .../packages.config                             |    4 +
 137 files changed, 16577 insertions(+), 16578 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs b/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
deleted file mode 100644
index 8930aa4..0000000
--- a/Lucene.Net.QueryParser/Analyzing/AnalyzingQueryParser.cs
+++ /dev/null
@@ -1,198 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Tokenattributes;
-using Lucene.Net.QueryParser.Classic;
-using Lucene.Net.Search;
-using Lucene.Net.Util;
-using System.Text;
-using System.Text.RegularExpressions;
-
-namespace Lucene.Net.QueryParser.Analyzing
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Overrides Lucene's default QueryParser so that Fuzzy-, Prefix-, Range-, and WildcardQuerys
-    /// are also passed through the given analyzer, but wildcard characters <code>*</code> and
-    /// <code>?</code> don't get removed from the search terms.
-    /// 
-    /// <p><b>Warning:</b> This class should only be used with analyzers that do not use stopwords
-    /// or that add tokens. Also, several stemming analyzers are inappropriate: for example, GermanAnalyzer 
-    /// will turn <code>H&auml;user</code> into <code>hau</code>, but <code>H?user</code> will 
-    /// become <code>h?user</code> when using this parser and thus no match would be found (i.e.
-    /// using this parser will be no improvement over QueryParser in such cases). 
-    /// </summary>
-    public class AnalyzingQueryParser : Classic.QueryParser
-    {
-        // gobble escaped chars or find a wildcard character 
-        private readonly Regex wildcardPattern = new Regex(@"(\\.)|([?*]+)", RegexOptions.Compiled);
-
-        public AnalyzingQueryParser(LuceneVersion matchVersion, string field, Analyzer analyzer)
-            : base(matchVersion, field, analyzer)
-        {
-            AnalyzeRangeTerms = true;
-        }
-
-        /// <summary>
-        /// Called when parser parses an input term
-        /// that uses prefix notation; that is, contains a single '*' wildcard
-        /// character as its last character. Since this is a special case
-        /// of generic wildcard term, and such a query can be optimized easily,
-        /// this usually results in a different query object.
-        /// <p>
-        /// Depending on analyzer and settings, a prefix term may (most probably will)
-        /// be lower-cased automatically. It <b>will</b> go through the default Analyzer.
-        /// <p>
-        /// Overrides super class, by passing terms through analyzer.
-        /// </summary>
-        /// <param name="field">Name of the field query will use.</param>
-        /// <param name="termStr">Term to use for building term for the query
-        /// (<b>without</b> trailing '*' character!)</param>
-        /// <returns>Resulting <see cref="Query"/> built for the term</returns>
-        protected internal override Query GetWildcardQuery(string field, string termStr)
-        {
-            if (termStr == null)
-            {
-                //can't imagine this would ever happen
-                throw new ParseException("Passed null value as term to getWildcardQuery");
-            }
-            if (!AllowLeadingWildcard && (termStr.StartsWith("*") || termStr.StartsWith("?")))
-            {
-                throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery"
-                                        + " unless getAllowLeadingWildcard() returns true");
-            }
-
-            Match wildcardMatcher = wildcardPattern.Match(termStr);
-            StringBuilder sb = new StringBuilder();
-            int last = 0;
-
-            while (wildcardMatcher.Success)
-            {
-                // continue if escaped char
-                if (wildcardMatcher.Groups[1].Success)
-                {
-                    wildcardMatcher = wildcardMatcher.NextMatch();
-                    continue;
-                }
-
-                if (wildcardMatcher.Index > last)
-                {
-                    string chunk = termStr.Substring(last, wildcardMatcher.Index - last);
-                    string analyzed = AnalyzeSingleChunk(field, termStr, chunk);
-                    sb.Append(analyzed);
-                }
-
-                //append the wildcard character
-                sb.Append(wildcardMatcher.Groups[2]);
-
-                last = wildcardMatcher.Index + wildcardMatcher.Length;
-                wildcardMatcher = wildcardMatcher.NextMatch();
-            }
-            if (last < termStr.Length)
-            {
-                sb.Append(AnalyzeSingleChunk(field, termStr, termStr.Substring(last)));
-            }
-            return base.GetWildcardQuery(field, sb.ToString());
-        }
-
-        /// <summary>
-        /// Called when parser parses an input term that has the fuzzy suffix (~) appended.
-        /// <p>
-        /// Depending on analyzer and settings, a fuzzy term may (most probably will)
-        /// be lower-cased automatically. It <b>will</b> go through the default Analyzer.
-        /// <p>
-        /// Overrides super class, by passing terms through analyzer.
-        /// </summary>
-        /// <param name="field">Name of the field query will use.</param>
-        /// <param name="termStr">Term to use for building term for the query</param>
-        /// <param name="minSimilarity"></param>
-        /// <returns>Resulting <see cref="Query"/> built for the term</returns>
-        protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
-        {
-            string analyzed = AnalyzeSingleChunk(field, termStr, termStr);
-            return base.GetFuzzyQuery(field, analyzed, minSimilarity);
-        }
-
-        /// <summary>
-        /// Returns the analyzed form for the given chunk
-        /// 
-        /// If the analyzer produces more than one output token from the given chunk,
-        /// a ParseException is thrown.
-        /// </summary>
-        /// <param name="field">The target field</param>
-        /// <param name="termStr">The full term from which the given chunk is excerpted</param>
-        /// <param name="chunk">The portion of the given termStr to be analyzed</param>
-        /// <returns>The result of analyzing the given chunk</returns>
-        /// <exception cref="ParseException">ParseException when analysis returns other than one output token</exception>
-        protected internal string AnalyzeSingleChunk(string field, string termStr, string chunk)
-        {
-            string analyzed = null;
-            TokenStream stream = null;
-            try
-            {
-                stream = Analyzer.TokenStream(field, chunk);
-                stream.Reset();
-                ICharTermAttribute termAtt = stream.GetAttribute<ICharTermAttribute>();
-                // get first and hopefully only output token
-                if (stream.IncrementToken())
-                {
-                    analyzed = termAtt.ToString();
-
-                    // try to increment again, there should only be one output token
-                    StringBuilder multipleOutputs = null;
-                    while (stream.IncrementToken())
-                    {
-                        if (null == multipleOutputs)
-                        {
-                            multipleOutputs = new StringBuilder();
-                            multipleOutputs.Append('"');
-                            multipleOutputs.Append(analyzed);
-                            multipleOutputs.Append('"');
-                        }
-                        multipleOutputs.Append(',');
-                        multipleOutputs.Append('"');
-                        multipleOutputs.Append(termAtt.ToString());
-                        multipleOutputs.Append('"');
-                    }
-                    stream.End();
-                    if (null != multipleOutputs)
-                    {
-                        throw new ParseException(
-                            string.Format(Locale, "Analyzer created multiple terms for \"%s\": %s", chunk, multipleOutputs.ToString()));
-                    }
-                }
-                else
-                {
-                    // nothing returned by analyzer.  Was it a stop word and the user accidentally
-                    // used an analyzer with stop words?
-                    stream.End();
-                    throw new ParseException(string.Format(Locale, "Analyzer returned nothing for \"%s\"", chunk));
-                }
-            }
-            catch (System.IO.IOException e)
-            {
-                throw new ParseException(
-                    string.Format(Locale, "IO error while trying to analyze single term: \"%s\"", termStr));
-            }
-            finally
-            {
-                IOUtils.CloseWhileHandlingException(stream);
-            }
-            return analyzed;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/CharStream.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/CharStream.cs b/Lucene.Net.QueryParser/Classic/CharStream.cs
deleted file mode 100644
index 8d0fc3a..0000000
--- a/Lucene.Net.QueryParser/Classic/CharStream.cs
+++ /dev/null
@@ -1,134 +0,0 @@
-using System;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-	
-	/// <summary> 
-    /// This interface describes a character stream that maintains line and
-	/// column number positions of the characters.  It also has the capability
-	/// to backup the stream to some extent.  An implementation of this
-	/// interface is used in the TokenManager implementation generated by
-	/// JavaCCParser.
-	/// 
-	/// All the methods except backup can be implemented in any fashion. backup
-	/// needs to be implemented correctly for the correct operation of the lexer.
-	/// Rest of the methods are all used to get information like line number,
-	/// column number and the String that constitutes a token and are not used
-	/// by the lexer. Hence their implementation won't affect the generated lexer's
-	/// operation.
-	/// </summary>
-	public interface ICharStream
-	{
-		/// <summary> 
-        /// Returns the next character from the selected input.  The method
-		/// of selecting the input is the responsibility of the class
-		/// implementing this interface.  Can throw any java.io.IOException.
-		/// </summary>
-		char ReadChar();
-
-	    /// <summary>
-        /// Returns the column position of the character last read.
-        /// </summary>
-	    /// <deprecated>
-	    /// </deprecated>
-	    /// <seealso cref="EndColumn">
-	    /// </seealso>
-	    [Obsolete]
-	    int Column { get; }
-
-	    /// <summary>
-        /// Returns the line number of the character last read.
-        /// </summary>
-	    /// <deprecated>
-	    /// </deprecated>
-	    /// <seealso cref="EndLine">
-	    /// </seealso>
-	    [Obsolete]
-	    int Line { get; }
-
-	    /// <summary>
-        /// Returns the column number of the last character for current token (being
-	    /// matched after the last call to BeginToken).
-	    /// </summary>
-	    int EndColumn { get; }
-
-	    /// <summary> 
-        /// Returns the line number of the last character for current token (being
-	    /// matched after the last call to BeginToken).
-	    /// </summary>
-	    int EndLine { get; }
-
-	    /// <summary> 
-        /// Returns the column number of the first character for current token (being
-	    /// matched after the last call to BeginToken).
-	    /// </summary>
-	    int BeginColumn { get; }
-
-	    /// <summary> 
-        /// Returns the line number of the first character for current token (being
-	    /// matched after the last call to BeginToken).
-	    /// </summary>
-	    int BeginLine { get; }
-
-	    /// <summary> 
-        /// Backs up the input stream by amount steps. Lexer calls this method if it
-		/// had already read some characters, but could not use them to match a
-		/// (longer) token. So, they will be used again as the prefix of the next
-		/// token and it is the implementation's responsibility to do this right.
-		/// </summary>
-		void  Backup(int amount);
-		
-		/// <summary> 
-        /// Returns the next character that marks the beginning of the next token.
-		/// All characters must remain in the buffer between two successive calls
-		/// to this method to implement backup correctly.
-		/// </summary>
-		char BeginToken();
-
-	    /// <summary> 
-        /// Returns a string made up of characters from the marked token beginning
-	    /// to the current buffer position. Implementations have the choice of returning
-	    /// anything that they want to. For example, for efficiency, one might decide
-	    /// to just return null, which is a valid implementation.
-	    /// </summary>
-	    string Image { get; }
-
-	    /// <summary> 
-        /// Returns an array of characters that make up the suffix of length 'len' for
-		/// the currently matched token. This is used to build up the matched string
-		/// for use in actions in the case of MORE. A simple and inefficient
-		/// implementation of this is as follows :
-		/// 
-		/// {
-		/// String t = GetImage();
-		/// return t.substring(t.length() - len, t.length()).toCharArray();
-		/// }
-		/// </summary>
-		char[] GetSuffix(int len);
-		
-		/// <summary> 
-        /// The lexer calls this function to indicate that it is done with the stream
-		/// and hence implementations can free any resources held by this class.
-		/// Again, the body of this function can be just empty and it will not
-		/// affect the lexer's operation.
-		/// </summary>
-		void  Done();
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/FastCharStream.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/FastCharStream.cs b/Lucene.Net.QueryParser/Classic/FastCharStream.cs
deleted file mode 100644
index b4da568..0000000
--- a/Lucene.Net.QueryParser/Classic/FastCharStream.cs
+++ /dev/null
@@ -1,158 +0,0 @@
-using System;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-	
-	/// <summary>
-    /// An efficient implementation of JavaCC's CharStream interface.  <p/>Note that
-	/// this does not do line-number counting, but instead keeps track of the
-	/// character position of the token in the input, as required by Lucene's <see cref="Lucene.Net.Analysis.Token" />
-	/// API.
-	/// </summary>
-	public sealed class FastCharStream : ICharStream
-	{
-		internal char[] buffer = null;
-		
-		internal int bufferLength = 0; // end of valid chars
-		internal int bufferPosition = 0; // next char to read
-		
-		internal int tokenStart = 0; // offset in buffer
-		internal int bufferStart = 0; // position in file of buffer
-		
-		internal System.IO.TextReader input; // source of chars
-		
-		/// <summary>
-        /// Constructs from a Reader. 
-        /// </summary>
-		public FastCharStream(System.IO.TextReader r)
-		{
-			input = r;
-		}
-		
-		public char ReadChar()
-		{
-			if (bufferPosition >= bufferLength)
-				Refill();
-			return buffer[bufferPosition++];
-		}
-		
-		private void  Refill()
-		{
-			int newPosition = bufferLength - tokenStart;
-			
-			if (tokenStart == 0)
-			{
-				// token won't fit in buffer
-				if (buffer == null)
-				{
-					// first time: alloc buffer
-					buffer = new char[2048];
-				}
-				else if (bufferLength == buffer.Length)
-				{
-					// grow buffer
-					char[] newBuffer = new char[buffer.Length * 2];
-					Array.Copy(buffer, 0, newBuffer, 0, bufferLength);
-					buffer = newBuffer;
-				}
-			}
-			else
-			{
-				// shift token to front
-				Array.Copy(buffer, tokenStart, buffer, 0, newPosition);
-			}
-			
-			bufferLength = newPosition; // update state
-			bufferPosition = newPosition;
-			bufferStart += tokenStart;
-			tokenStart = 0;
-			
-			int charsRead = input.Read(buffer, newPosition, buffer.Length - newPosition);
-			if (charsRead <= 0)
-				throw new System.IO.IOException("read past eof");
-			else
-				bufferLength += charsRead;
-		}
-		
-		public char BeginToken()
-		{
-			tokenStart = bufferPosition;
-			return ReadChar();
-		}
-		
-		public void  Backup(int amount)
-		{
-			bufferPosition -= amount;
-		}
-
-	    public string Image
-	    {
-	        get { return new System.String(buffer, tokenStart, bufferPosition - tokenStart); }
-	    }
-
-	    public char[] GetSuffix(int len)
-		{
-			char[] value_Renamed = new char[len];
-			Array.Copy(buffer, bufferPosition - len, value_Renamed, 0, len);
-			return value_Renamed;
-		}
-		
-		public void Done()
-		{
-			try
-			{
-				input.Close();
-			}
-			catch (System.IO.IOException e)
-			{
-				System.Console.Error.WriteLine("Caught: " + e + "; ignoring.");
-			}
-		}
-
-	    public int Column
-	    {
-	        get { return bufferStart + bufferPosition; }
-	    }
-
-	    public int Line
-	    {
-	        get { return 1; }
-	    }
-
-	    public int EndColumn
-	    {
-	        get { return bufferStart + bufferPosition; }
-	    }
-
-	    public int EndLine
-	    {
-	        get { return 1; }
-	    }
-
-	    public int BeginColumn
-	    {
-	        get { return bufferStart + tokenStart; }
-	    }
-
-	    public int BeginLine
-	    {
-	        get { return 1; }
-	    }
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs b/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
deleted file mode 100644
index 151fe38..0000000
--- a/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
+++ /dev/null
@@ -1,404 +0,0 @@
-using System;
-using System.Collections.Generic;
-using Lucene.Net.Search;
-using Lucene.Net.Analysis;
-using Lucene.Net.Util;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-
-    /// <summary> 
-    /// A QueryParser which constructs queries to search multiple fields.
-    /// </summary>
-    public class MultiFieldQueryParser : QueryParser
-    {
-        protected internal string[] fields;
-        protected internal IDictionary<string, float> boosts;
-
-        /// <summary> 
-        /// Creates a MultiFieldQueryParser. Allows passing of a map with term to
-        /// Boost, and the boost to apply to each term.
-        /// 
-        /// <p/>
-        /// It will, when parse(String query) is called, construct a query like this
-        /// (assuming the query consists of two terms and you specify the two fields
-        /// <c>title</c> and <c>body</c>):
-        /// <p/>
-        /// 
-        /// <code>
-        /// (title:term1 body:term1) (title:term2 body:term2)
-        /// </code>
-        /// 
-        /// <p/>
-        /// When setDefaultOperator(AND_OPERATOR) is set, the result will be:
-        /// <p/>
-        /// 
-        /// <code>
-        /// +(title:term1 body:term1) +(title:term2 body:term2)
-        /// </code>
-        /// 
-        /// <p/>
-        /// When you pass a boost (title=>5 body=>10) you can get
-        /// <p/>
-        /// 
-        /// <code>
-        /// +(title:term1^5.0 body:term1^10.0) +(title:term2^5.0 body:term2^10.0)
-        /// </code>
-        /// 
-        /// <p/>
-        /// In other words, all the query's terms must appear, but it doesn't matter
-        /// in what fields they appear.
-        /// <p/>
-        /// </summary>
-        public MultiFieldQueryParser(LuceneVersion matchVersion, string[] fields, Analyzer analyzer, IDictionary<string, float> boosts)
-            : this(matchVersion, fields, analyzer)
-        {
-            this.boosts = boosts;
-        }
-
-        /// <summary> 
-        /// Creates a MultiFieldQueryParser.
-        /// 
-        /// <p/>
-        /// It will, when parse(String query) is called, construct a query like this
-        /// (assuming the query consists of two terms and you specify the two fields
-        /// <c>title</c> and <c>body</c>):
-        /// <p/>
-        /// 
-        /// <code>
-        /// (title:term1 body:term1) (title:term2 body:term2)
-        /// </code>
-        /// 
-        /// <p/>
-        /// When setDefaultOperator(AND_OPERATOR) is set, the result will be:
-        /// <p/>
-        /// 
-        /// <code>
-        /// +(title:term1 body:term1) +(title:term2 body:term2)
-        /// </code>
-        /// 
-        /// <p/>
-        /// In other words, all the query's terms must appear, but it doesn't matter
-        /// in what fields they appear.
-        /// <p/>
-        /// </summary>
-        public MultiFieldQueryParser(LuceneVersion matchVersion, string[] fields, Analyzer analyzer)
-            : base(matchVersion, null, analyzer)
-        {
-            this.fields = fields;
-        }
-
-        protected internal override Query GetFieldQuery(string field, string queryText, int slop)
-        {
-            if (field == null)
-            {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
-                for (int i = 0; i < fields.Length; i++)
-                {
-                    Query q = base.GetFieldQuery(fields[i], queryText, true);
-                    if (q != null)
-                    {
-                        //If the user passes a map of boosts
-                        if (boosts != null)
-                        {
-                            //Get the boost from the map and apply them
-                            float boost = boosts[fields[i]];
-                            q.Boost = boost;
-                        }
-                        ApplySlop(q, slop);
-                        clauses.Add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
-                    }
-                }
-                if (clauses.Count == 0)
-                    // happens for stopwords
-                    return null;
-                return GetBooleanQuery(clauses, true);
-            }
-            Query q2 = base.GetFieldQuery(field, queryText, true);
-            ApplySlop(q2, slop);
-            return q2;
-        }
-
-        private void ApplySlop(Query q, int slop)
-        {
-            if (q is PhraseQuery)
-            {
-                ((PhraseQuery)q).Slop = slop;
-            }
-            else if (q is MultiPhraseQuery)
-            {
-                ((MultiPhraseQuery)q).Slop = slop;
-            }
-        }
-
-        protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
-        {
-            if (field == null)
-            {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
-                for (int i = 0; i < fields.Length; i++)
-                {
-                    Query q = base.GetFieldQuery(fields[i], queryText, quoted);
-                    if (q != null)
-                    {
-                        //If the user passes a map of boosts
-                        if (boosts != null)
-                        {
-                            //Get the boost from the map and apply them
-                            float boost = boosts[fields[i]];
-                            q.Boost = boost;
-                        }
-                        clauses.Add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
-                    }
-                }
-                if (clauses.Count == 0)  // happens for stopwords
-                    return null;
-                return GetBooleanQuery(clauses, true);
-            }
-            Query q2 = base.GetFieldQuery(field, queryText, quoted);
-            return q2;
-        }
-
-        protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
-        {
-            if (field == null)
-            {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
-                for (int i = 0; i < fields.Length; i++)
-                {
-                    clauses.Add(new BooleanClause(GetFuzzyQuery(fields[i], termStr, minSimilarity), BooleanClause.Occur.SHOULD));
-                }
-                return GetBooleanQuery(clauses, true);
-            }
-            return base.GetFuzzyQuery(field, termStr, minSimilarity);
-        }
-
-        protected internal override Query GetPrefixQuery(System.String field, System.String termStr)
-        {
-            if (field == null)
-            {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
-                for (int i = 0; i < fields.Length; i++)
-                {
-                    clauses.Add(new BooleanClause(GetPrefixQuery(fields[i], termStr), BooleanClause.Occur.SHOULD));
-                }
-                return GetBooleanQuery(clauses, true);
-            }
-            return base.GetPrefixQuery(field, termStr);
-        }
-
-        protected internal override Query GetWildcardQuery(string field, string termStr)
-        {
-            if (field == null)
-            {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
-                for (int i = 0; i < fields.Length; i++)
-                {
-                    clauses.Add(new BooleanClause(GetWildcardQuery(fields[i], termStr), BooleanClause.Occur.SHOULD));
-                }
-                return GetBooleanQuery(clauses, true);
-            }
-            return base.GetWildcardQuery(field, termStr);
-        }
-
-
-        protected internal override Query GetRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
-        {
-            if (field == null)
-            {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
-                for (int i = 0; i < fields.Length; i++)
-                {
-                    clauses.Add(new BooleanClause(GetRangeQuery(fields[i], part1, part2, startInclusive, endInclusive), BooleanClause.Occur.SHOULD));
-                }
-                return GetBooleanQuery(clauses, true);
-            }
-            return base.GetRangeQuery(field, part1, part2, startInclusive, endInclusive);
-        }
-
-        protected internal override Query GetRegexpQuery(string field, string termStr)
-        {
-            if (field == null)
-            {
-                IList<BooleanClause> clauses = new List<BooleanClause>();
-                for (int i = 0; i < fields.Length; i++)
-                {
-                    clauses.Add(new BooleanClause(GetRegexpQuery(fields[i], termStr),
-                        BooleanClause.Occur.SHOULD));
-                }
-                return GetBooleanQuery(clauses, true);
-            }
-            return base.GetRegexpQuery(field, termStr);
-        }
-
-        /// <summary> 
-        /// Parses a query which searches on the fields specified.
-        /// <p/>
-        /// If x fields are specified, this effectively constructs:
-        /// 
-        /// <code>
-        /// (field1:query1) (field2:query2) (field3:query3)...(fieldx:queryx)
-        /// </code>
-        /// 
-        /// </summary>
-        /// <param name="matchVersion">Lucene version to match; this is passed through to
-        /// QueryParser.
-        /// </param>
-        /// <param name="queries">Queries strings to parse
-        /// </param>
-        /// <param name="fields">Fields to search on
-        /// </param>
-        /// <param name="analyzer">Analyzer to use
-        /// </param>
-        /// <throws>  ParseException </throws>
-        /// <summary>             if query parsing fails
-        /// </summary>
-        /// <throws>  IllegalArgumentException </throws>
-        /// <summary>             if the length of the queries array differs from the length of
-        /// the fields array
-        /// </summary>
-        public static Query Parse(LuceneVersion matchVersion, string[] queries, string[] fields, Analyzer analyzer)
-        {
-            if (queries.Length != fields.Length)
-                throw new System.ArgumentException("queries.length != fields.length");
-            BooleanQuery bQuery = new BooleanQuery();
-            for (int i = 0; i < fields.Length; i++)
-            {
-                QueryParser qp = new QueryParser(matchVersion, fields[i], analyzer);
-                Query q = qp.Parse(queries[i]);
-                if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).GetClauses().Count > 0))
-                {
-                    bQuery.Add(q, BooleanClause.Occur.SHOULD);
-                }
-            }
-            return bQuery;
-        }
-
-        /// <summary> 
-        /// Parses a query, searching on the fields specified. Use this if you need
-        /// to specify certain fields as required, and others as prohibited.
-        /// <p/>
-        /// Uasge:
-        /// <code>
-        /// String[] fields = {&quot;filename&quot;, &quot;contents&quot;, &quot;description&quot;};
-        /// BooleanClause.Occur[] flags = {BooleanClause.Occur.SHOULD,
-        /// BooleanClause.Occur.MUST,
-        /// BooleanClause.Occur.MUST_NOT};
-        /// MultiFieldQueryParser.parse(&quot;query&quot;, fields, flags, analyzer);
-        /// </code>
-        /// <p/>
-        /// The code above would construct a query:
-        /// 
-        /// <code>
-        /// (filename:query) +(contents:query) -(description:query)
-        /// </code>
-        /// 
-        /// </summary>
-        /// <param name="matchVersion">Lucene version to match; this is passed through to
-        /// QueryParser.
-        /// </param>
-        /// <param name="query">Query string to parse
-        /// </param>
-        /// <param name="fields">Fields to search on
-        /// </param>
-        /// <param name="flags">Flags describing the fields
-        /// </param>
-        /// <param name="analyzer">Analyzer to use
-        /// </param>
-        /// <throws>  ParseException </throws>
-        /// <summary>             if query parsing fails
-        /// </summary>
-        /// <throws>  IllegalArgumentException </throws>
-        /// <summary>             if the length of the fields array differs from the length of
-        /// the flags array
-        /// </summary>
-        public static Query Parse(LuceneVersion matchVersion, string query, string[] fields, BooleanClause.Occur[] flags, Analyzer analyzer)
-        {
-            if (fields.Length != flags.Length)
-                throw new System.ArgumentException("fields.length != flags.length");
-            BooleanQuery bQuery = new BooleanQuery();
-            for (int i = 0; i < fields.Length; i++)
-            {
-                QueryParser qp = new QueryParser(matchVersion, fields[i], analyzer);
-                Query q = qp.Parse(query);
-                if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).GetClauses().Count > 0))
-                {
-                    bQuery.Add(q, flags[i]);
-                }
-            }
-            return bQuery;
-        }
-
-        /// <summary> 
-        /// Parses a query, searching on the fields specified. Use this if you need
-        /// to specify certain fields as required, and others as prohibited.
-        /// <p/>
-        /// Usage:
-        /// <code>
-        /// String[] query = {&quot;query1&quot;, &quot;query2&quot;, &quot;query3&quot;};
-        /// String[] fields = {&quot;filename&quot;, &quot;contents&quot;, &quot;description&quot;};
-        /// BooleanClause.Occur[] flags = {BooleanClause.Occur.SHOULD,
-        /// BooleanClause.Occur.MUST,
-        /// BooleanClause.Occur.MUST_NOT};
-        /// MultiFieldQueryParser.parse(query, fields, flags, analyzer);
-        /// </code>
-        /// <p/>
-        /// The code above would construct a query:
-        /// 
-        /// <code>
-        /// (filename:query1) +(contents:query2) -(description:query3)
-        /// </code>
-        /// 
-        /// </summary>
-        /// <param name="matchVersion">Lucene version to match; this is passed through to
-        /// QueryParser.
-        /// </param>
-        /// <param name="queries">Queries string to parse
-        /// </param>
-        /// <param name="fields">Fields to search on
-        /// </param>
-        /// <param name="flags">Flags describing the fields
-        /// </param>
-        /// <param name="analyzer">Analyzer to use
-        /// </param>
-        /// <throws>  ParseException </throws>
-        /// <summary>             if query parsing fails
-        /// </summary>
-        /// <throws>  IllegalArgumentException </throws>
-        /// <summary>             if the length of the queries, fields, and flags array differ
-        /// </summary>
-        public static Query Parse(LuceneVersion matchVersion, string[] queries, string[] fields, BooleanClause.Occur[] flags, Analyzer analyzer)
-        {
-            if (!(queries.Length == fields.Length && queries.Length == flags.Length))
-                throw new System.ArgumentException("queries, fields, and flags array have have different length");
-            BooleanQuery bQuery = new BooleanQuery();
-            for (int i = 0; i < fields.Length; i++)
-            {
-                QueryParser qp = new QueryParser(matchVersion, fields[i], analyzer);
-                Query q = qp.Parse(queries[i]);
-                if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).GetClauses().Count > 0))
-                {
-                    bQuery.Add(q, flags[i]);
-                }
-            }
-            return bQuery;
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/ParseException.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/ParseException.cs b/Lucene.Net.QueryParser/Classic/ParseException.cs
deleted file mode 100644
index 161fa95..0000000
--- a/Lucene.Net.QueryParser/Classic/ParseException.cs
+++ /dev/null
@@ -1,235 +0,0 @@
-using System;
-using System.Text;
-using Lucene.Net.Support;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-	/// <summary> 
-    /// This exception is thrown when parse errors are encountered.
-	/// You can explicitly create objects of this exception type by
-	/// calling the method GenerateParseException in the generated
-	/// parser.
-	/// 
-	/// You can modify this class to customize your error reporting
-	/// mechanisms so long as you retain the public fields.
-	/// </summary>
-	[Serializable]
-	public class ParseException : Exception
-	{
-        /// <summary>
-        /// This constructor is used by the method "GenerateParseException"
-        /// in the generated parser.  Calling this constructor generates
-        /// a new object of this type with the fields "currentToken",
-        /// "expectedTokenSequences", and "tokenImage" set.
-        /// </summary>
-        /// <param name="currentTokenVal"></param>
-        /// <param name="expectedTokenSequencesVal"></param>
-        /// <param name="tokenImageVal"></param>
-        public ParseException(Token currentTokenVal,
-                        int[][] expectedTokenSequencesVal,
-                        string[] tokenImageVal)
-            : base(Initialize(currentTokenVal, expectedTokenSequencesVal, tokenImageVal))
-        {
-            currentToken = currentTokenVal;
-            expectedTokenSequences = expectedTokenSequencesVal;
-            tokenImage = tokenImageVal;
-        }
-
-        /**
-         * The following constructors are for use by you for whatever
-         * purpose you can think of.  Constructing the exception in this
-         * manner makes the exception behave in the normal way - i.e., as
-         * documented in the class "Throwable".  The fields "errorToken",
-         * "expectedTokenSequences", and "tokenImage" do not contain
-         * relevant information.  The JavaCC generated code does not use
-         * these constructors.
-         */
-
-        public ParseException()
-        { }
-
-        public ParseException(string message)
-            : base(message)
-        { }
-
-        public ParseException(string message, Exception innerException)
-            : base(message, innerException)
-        { }
-
-
-        /// <summary> 
-        /// This is the last token that has been consumed successfully.  If
-        /// this object has been created due to a parse error, the token
-        /// following this token will (therefore) be the first error token.
-        /// </summary>
-        public Token currentToken;
-
-        /// <summary> 
-        /// Each entry in this array is an array of integers.  Each array
-        /// of integers represents a sequence of tokens (by their ordinal
-        /// values) that is expected at this point of the parse.
-        /// </summary>
-        public int[][] expectedTokenSequences;
-
-        /// <summary> 
-        /// This is a reference to the "tokenImage" array of the generated
-        /// parser within which the parse error occurred.  This array is
-        /// defined in the generated ...Constants interface.
-        /// </summary>
-        public string[] tokenImage;
-
-
-        /// <summary>
-        /// It uses "currentToken" and "expectedTokenSequences" to generate a parse
-        /// error message and returns it.  If this object has been created
-        /// due to a parse error, and you do not catch it (it gets thrown
-        /// from the parser) the correct error message
-        /// gets displayed.
-        /// </summary>
-        /// <param name="currentToken"></param>
-        /// <param name="expectedTokenSequences"></param>
-        /// <param name="tokenImage"></param>
-        /// <returns></returns>
-        private static string Initialize(Token currentToken,
-            int[][] expectedTokenSequences,
-            string[] tokenImage)
-        {
-
-            StringBuilder expected = new StringBuilder();
-            int maxSize = 0;
-            for (int i = 0; i < expectedTokenSequences.Length; i++)
-            {
-                if (maxSize < expectedTokenSequences[i].Length)
-                {
-                    maxSize = expectedTokenSequences[i].Length;
-                }
-                for (int j = 0; j < expectedTokenSequences[i].Length; j++)
-                {
-                    expected.Append(tokenImage[expectedTokenSequences[i][j]]).Append(' ');
-                }
-                if (expectedTokenSequences[i][expectedTokenSequences[i].Length - 1] != 0)
-                {
-                    expected.Append("...");
-                }
-                expected.Append(eol).Append("    ");
-            }
-            string retval = "Encountered \"";
-            Token tok = currentToken.next;
-            for (int i = 0; i < maxSize; i++)
-            {
-                if (i != 0)
-                    retval += " ";
-                if (tok.kind == 0)
-                {
-                    retval += tokenImage[0];
-                    break;
-                }
-                retval += (" " + tokenImage[tok.kind]);
-                retval += " \"";
-                retval += Add_escapes(tok.image);
-                retval += " \"";
-                tok = tok.next;
-            }
-            retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
-            retval += ("." + eol);
-            if (expectedTokenSequences.Length == 1)
-            {
-                retval += ("Was expecting:" + eol + "    ");
-            }
-            else
-            {
-                retval += ("Was expecting one of:" + eol + "    ");
-            }
-            retval += expected.ToString();
-            return retval;
-        }
-		
-		/// <summary> 
-        /// The end of line string for this machine.
-        /// </summary>
-		protected static string eol = Environment.NewLine;
-		
-		/// <summary> 
-        /// Used to convert raw characters to their escaped version
-		/// when these raw version cannot be used as part of an ASCII
-		/// string literal.
-		/// </summary>
-		internal static string Add_escapes(string str)
-		{
-			StringBuilder retval = new StringBuilder();
-			char ch;
-			for (int i = 0; i < str.Length; i++)
-			{
-				switch (str[i])
-				{
-					
-					case (char) (0): 
-						continue;
-					
-					case '\b': 
-						retval.Append("\\b");
-						continue;
-					
-					case '\t': 
-						retval.Append("\\t");
-						continue;
-					
-					case '\n': 
-						retval.Append("\\n");
-						continue;
-					
-					case '\f': 
-						retval.Append("\\f");
-						continue;
-					
-					case '\r': 
-						retval.Append("\\r");
-						continue;
-					
-					case '\"': 
-						retval.Append("\\\"");
-						continue;
-					
-					case '\'': 
-						retval.Append("\\\'");
-						continue;
-					
-					case '\\': 
-						retval.Append("\\\\");
-						continue;
-					
-					default: 
-						if ((ch = str[i]) < 0x20 || ch > 0x7e)
-						{
-							System.String s = "0000" + System.Convert.ToString(ch, 16);
-							retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
-						}
-						else
-						{
-							retval.Append(ch);
-						}
-						continue;
-					
-				}
-			}
-			return retval.ToString();
-		}
-	}
-}
\ No newline at end of file


[18/50] [abbrv] lucenenet git commit: Updated comments.

Posted by sy...@apache.org.
Updated comments.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/0f10c9f5
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/0f10c9f5
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/0f10c9f5

Branch: refs/heads/master
Commit: 0f10c9f5424f579861f8bb86a241aab7de3f0821
Parents: 1e7576a
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Tue Aug 2 14:48:49 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:44 2016 +0700

----------------------------------------------------------------------
 Lucene.Net.QueryParser/Classic/ParseException.cs   | 4 ++--
 Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0f10c9f5/Lucene.Net.QueryParser/Classic/ParseException.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/ParseException.cs b/Lucene.Net.QueryParser/Classic/ParseException.cs
index 0ccaddd..161fa95 100644
--- a/Lucene.Net.QueryParser/Classic/ParseException.cs
+++ b/Lucene.Net.QueryParser/Classic/ParseException.cs
@@ -24,7 +24,7 @@ namespace Lucene.Net.QueryParser.Classic
 	/// <summary> 
     /// This exception is thrown when parse errors are encountered.
 	/// You can explicitly create objects of this exception type by
-	/// calling the method generateParseException in the generated
+	/// calling the method GenerateParseException in the generated
 	/// parser.
 	/// 
 	/// You can modify this class to customize your error reporting
@@ -34,7 +34,7 @@ namespace Lucene.Net.QueryParser.Classic
 	public class ParseException : Exception
 	{
         /// <summary>
-        /// This constructor is used by the method "generateParseException"
+        /// This constructor is used by the method "GenerateParseException"
         /// in the generated parser.  Calling this constructor generates
         /// a new object of this type with the fields "currentToken",
         /// "expectedTokenSequences", and "tokenImage" set.

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/0f10c9f5/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
index 8607d27..1029c8b 100644
--- a/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
+++ b/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
@@ -95,7 +95,7 @@ namespace Lucene.Net.QueryParser.Simple
         /** Map of fields to query against with their weights */
         protected readonly IDictionary<string, float> weights;
 
-        // TODO: Make these into a [Flags] enum??
+        // TODO: Make these into a [Flags] enum in .NET??
         /** flags to the parser (to turn features on/off) */
         protected readonly int flags;
 


[10/50] [abbrv] lucenenet git commit: Fixed bug in Lucene.Net.Util.ToStringUtils that was causing issues with the QueryParser tests. Rolled the Boost(float) method back to the Lucene.Net 3.0.3 state.

Posted by sy...@apache.org.
Fixed bug in Lucene.Net.Util.ToStringUtils that was causing issues with the QueryParser tests. Rolled the Boost(float) method back to the Lucene.Net 3.0.3 state.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/1937dda3
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/1937dda3
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/1937dda3

Branch: refs/heads/master
Commit: 1937dda38914c032f204cdf23b91157246a63de5
Parents: 11ecedc
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 21:37:32 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:15 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Util/ToStringUtils.cs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1937dda3/src/Lucene.Net.Core/Util/ToStringUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/ToStringUtils.cs b/src/Lucene.Net.Core/Util/ToStringUtils.cs
index d3d97c4..e5f5a0d 100644
--- a/src/Lucene.Net.Core/Util/ToStringUtils.cs
+++ b/src/Lucene.Net.Core/Util/ToStringUtils.cs
@@ -1,4 +1,5 @@
 using System;
+using System.Globalization;
 using System.Text;
 
 namespace Lucene.Net.Util
@@ -36,12 +37,13 @@ namespace Lucene.Net.Util
         {
             if (boost != 1.0f)
             {
-                return "^" + Convert.ToString(boost);
+                float boostAsLong = (long)boost;
+                if (boostAsLong == boost)
+                    return "^" + boost.ToString(".0").Replace(CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator, ".");
+                return "^" + boost.ToString().Replace(CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator, ".");
             }
             else
-            {
                 return "";
-            }
         }
 
         public static void ByteArray(StringBuilder buffer, byte[] bytes)


[27/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs b/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
new file mode 100644
index 0000000..151fe38
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/MultiFieldQueryParser.cs
@@ -0,0 +1,404 @@
+using System;
+using System.Collections.Generic;
+using Lucene.Net.Search;
+using Lucene.Net.Analysis;
+using Lucene.Net.Util;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+
+    /// <summary> 
+    /// A QueryParser which constructs queries to search multiple fields.
+    /// </summary>
+    public class MultiFieldQueryParser : QueryParser
+    {
+        protected internal string[] fields;
+        protected internal IDictionary<string, float> boosts;
+
+        /// <summary> 
+        /// Creates a MultiFieldQueryParser. Allows passing of a map with term to
+        /// Boost, and the boost to apply to each term.
+        /// 
+        /// <p/>
+        /// It will, when parse(String query) is called, construct a query like this
+        /// (assuming the query consists of two terms and you specify the two fields
+        /// <c>title</c> and <c>body</c>):
+        /// <p/>
+        /// 
+        /// <code>
+        /// (title:term1 body:term1) (title:term2 body:term2)
+        /// </code>
+        /// 
+        /// <p/>
+        /// When setDefaultOperator(AND_OPERATOR) is set, the result will be:
+        /// <p/>
+        /// 
+        /// <code>
+        /// +(title:term1 body:term1) +(title:term2 body:term2)
+        /// </code>
+        /// 
+        /// <p/>
+        /// When you pass a boost (title=>5 body=>10) you can get
+        /// <p/>
+        /// 
+        /// <code>
+        /// +(title:term1^5.0 body:term1^10.0) +(title:term2^5.0 body:term2^10.0)
+        /// </code>
+        /// 
+        /// <p/>
+        /// In other words, all the query's terms must appear, but it doesn't matter
+        /// in what fields they appear.
+        /// <p/>
+        /// </summary>
+        public MultiFieldQueryParser(LuceneVersion matchVersion, string[] fields, Analyzer analyzer, IDictionary<string, float> boosts)
+            : this(matchVersion, fields, analyzer)
+        {
+            this.boosts = boosts;
+        }
+
+        /// <summary> 
+        /// Creates a MultiFieldQueryParser.
+        /// 
+        /// <p/>
+        /// It will, when parse(String query) is called, construct a query like this
+        /// (assuming the query consists of two terms and you specify the two fields
+        /// <c>title</c> and <c>body</c>):
+        /// <p/>
+        /// 
+        /// <code>
+        /// (title:term1 body:term1) (title:term2 body:term2)
+        /// </code>
+        /// 
+        /// <p/>
+        /// When setDefaultOperator(AND_OPERATOR) is set, the result will be:
+        /// <p/>
+        /// 
+        /// <code>
+        /// +(title:term1 body:term1) +(title:term2 body:term2)
+        /// </code>
+        /// 
+        /// <p/>
+        /// In other words, all the query's terms must appear, but it doesn't matter
+        /// in what fields they appear.
+        /// <p/>
+        /// </summary>
+        public MultiFieldQueryParser(LuceneVersion matchVersion, string[] fields, Analyzer analyzer)
+            : base(matchVersion, null, analyzer)
+        {
+            this.fields = fields;
+        }
+
+        protected internal override Query GetFieldQuery(string field, string queryText, int slop)
+        {
+            if (field == null)
+            {
+                IList<BooleanClause> clauses = new List<BooleanClause>();
+                for (int i = 0; i < fields.Length; i++)
+                {
+                    Query q = base.GetFieldQuery(fields[i], queryText, true);
+                    if (q != null)
+                    {
+                        //If the user passes a map of boosts
+                        if (boosts != null)
+                        {
+                            //Get the boost from the map and apply them
+                            float boost = boosts[fields[i]];
+                            q.Boost = boost;
+                        }
+                        ApplySlop(q, slop);
+                        clauses.Add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
+                    }
+                }
+                if (clauses.Count == 0)
+                    // happens for stopwords
+                    return null;
+                return GetBooleanQuery(clauses, true);
+            }
+            Query q2 = base.GetFieldQuery(field, queryText, true);
+            ApplySlop(q2, slop);
+            return q2;
+        }
+
+        private void ApplySlop(Query q, int slop)
+        {
+            if (q is PhraseQuery)
+            {
+                ((PhraseQuery)q).Slop = slop;
+            }
+            else if (q is MultiPhraseQuery)
+            {
+                ((MultiPhraseQuery)q).Slop = slop;
+            }
+        }
+
+        protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
+        {
+            if (field == null)
+            {
+                IList<BooleanClause> clauses = new List<BooleanClause>();
+                for (int i = 0; i < fields.Length; i++)
+                {
+                    Query q = base.GetFieldQuery(fields[i], queryText, quoted);
+                    if (q != null)
+                    {
+                        //If the user passes a map of boosts
+                        if (boosts != null)
+                        {
+                            //Get the boost from the map and apply them
+                            float boost = boosts[fields[i]];
+                            q.Boost = boost;
+                        }
+                        clauses.Add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
+                    }
+                }
+                if (clauses.Count == 0)  // happens for stopwords
+                    return null;
+                return GetBooleanQuery(clauses, true);
+            }
+            Query q2 = base.GetFieldQuery(field, queryText, quoted);
+            return q2;
+        }
+
+        protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+        {
+            if (field == null)
+            {
+                IList<BooleanClause> clauses = new List<BooleanClause>();
+                for (int i = 0; i < fields.Length; i++)
+                {
+                    clauses.Add(new BooleanClause(GetFuzzyQuery(fields[i], termStr, minSimilarity), BooleanClause.Occur.SHOULD));
+                }
+                return GetBooleanQuery(clauses, true);
+            }
+            return base.GetFuzzyQuery(field, termStr, minSimilarity);
+        }
+
+        protected internal override Query GetPrefixQuery(System.String field, System.String termStr)
+        {
+            if (field == null)
+            {
+                IList<BooleanClause> clauses = new List<BooleanClause>();
+                for (int i = 0; i < fields.Length; i++)
+                {
+                    clauses.Add(new BooleanClause(GetPrefixQuery(fields[i], termStr), BooleanClause.Occur.SHOULD));
+                }
+                return GetBooleanQuery(clauses, true);
+            }
+            return base.GetPrefixQuery(field, termStr);
+        }
+
+        protected internal override Query GetWildcardQuery(string field, string termStr)
+        {
+            if (field == null)
+            {
+                IList<BooleanClause> clauses = new List<BooleanClause>();
+                for (int i = 0; i < fields.Length; i++)
+                {
+                    clauses.Add(new BooleanClause(GetWildcardQuery(fields[i], termStr), BooleanClause.Occur.SHOULD));
+                }
+                return GetBooleanQuery(clauses, true);
+            }
+            return base.GetWildcardQuery(field, termStr);
+        }
+
+
+        protected internal override Query GetRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
+        {
+            if (field == null)
+            {
+                IList<BooleanClause> clauses = new List<BooleanClause>();
+                for (int i = 0; i < fields.Length; i++)
+                {
+                    clauses.Add(new BooleanClause(GetRangeQuery(fields[i], part1, part2, startInclusive, endInclusive), BooleanClause.Occur.SHOULD));
+                }
+                return GetBooleanQuery(clauses, true);
+            }
+            return base.GetRangeQuery(field, part1, part2, startInclusive, endInclusive);
+        }
+
+        protected internal override Query GetRegexpQuery(string field, string termStr)
+        {
+            if (field == null)
+            {
+                IList<BooleanClause> clauses = new List<BooleanClause>();
+                for (int i = 0; i < fields.Length; i++)
+                {
+                    clauses.Add(new BooleanClause(GetRegexpQuery(fields[i], termStr),
+                        BooleanClause.Occur.SHOULD));
+                }
+                return GetBooleanQuery(clauses, true);
+            }
+            return base.GetRegexpQuery(field, termStr);
+        }
+
+        /// <summary> 
+        /// Parses a query which searches on the fields specified.
+        /// <p/>
+        /// If x fields are specified, this effectively constructs:
+        /// 
+        /// <code>
+        /// (field1:query1) (field2:query2) (field3:query3)...(fieldx:queryx)
+        /// </code>
+        /// 
+        /// </summary>
+        /// <param name="matchVersion">Lucene version to match; this is passed through to
+        /// QueryParser.
+        /// </param>
+        /// <param name="queries">Queries strings to parse
+        /// </param>
+        /// <param name="fields">Fields to search on
+        /// </param>
+        /// <param name="analyzer">Analyzer to use
+        /// </param>
+        /// <throws>  ParseException </throws>
+        /// <summary>             if query parsing fails
+        /// </summary>
+        /// <throws>  IllegalArgumentException </throws>
+        /// <summary>             if the length of the queries array differs from the length of
+        /// the fields array
+        /// </summary>
+        public static Query Parse(LuceneVersion matchVersion, string[] queries, string[] fields, Analyzer analyzer)
+        {
+            if (queries.Length != fields.Length)
+                throw new System.ArgumentException("queries.length != fields.length");
+            BooleanQuery bQuery = new BooleanQuery();
+            for (int i = 0; i < fields.Length; i++)
+            {
+                QueryParser qp = new QueryParser(matchVersion, fields[i], analyzer);
+                Query q = qp.Parse(queries[i]);
+                if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).GetClauses().Count > 0))
+                {
+                    bQuery.Add(q, BooleanClause.Occur.SHOULD);
+                }
+            }
+            return bQuery;
+        }
+
+        /// <summary> 
+        /// Parses a query, searching on the fields specified. Use this if you need
+        /// to specify certain fields as required, and others as prohibited.
+        /// <p/>
+        /// Usage:
+        /// <code>
+        /// String[] fields = {&quot;filename&quot;, &quot;contents&quot;, &quot;description&quot;};
+        /// BooleanClause.Occur[] flags = {BooleanClause.Occur.SHOULD,
+        /// BooleanClause.Occur.MUST,
+        /// BooleanClause.Occur.MUST_NOT};
+        /// MultiFieldQueryParser.parse(&quot;query&quot;, fields, flags, analyzer);
+        /// </code>
+        /// <p/>
+        /// The code above would construct a query:
+        /// 
+        /// <code>
+        /// (filename:query) +(contents:query) -(description:query)
+        /// </code>
+        /// 
+        /// </summary>
+        /// <param name="matchVersion">Lucene version to match; this is passed through to
+        /// QueryParser.
+        /// </param>
+        /// <param name="query">Query string to parse
+        /// </param>
+        /// <param name="fields">Fields to search on
+        /// </param>
+        /// <param name="flags">Flags describing the fields
+        /// </param>
+        /// <param name="analyzer">Analyzer to use
+        /// </param>
+        /// <throws>  ParseException </throws>
+        /// <summary>             if query parsing fails
+        /// </summary>
+        /// <throws>  IllegalArgumentException </throws>
+        /// <summary>             if the length of the fields array differs from the length of
+        /// the flags array
+        /// </summary>
+        public static Query Parse(LuceneVersion matchVersion, string query, string[] fields, BooleanClause.Occur[] flags, Analyzer analyzer)
+        {
+            if (fields.Length != flags.Length)
+                throw new System.ArgumentException("fields.length != flags.length");
+            BooleanQuery bQuery = new BooleanQuery();
+            for (int i = 0; i < fields.Length; i++)
+            {
+                QueryParser qp = new QueryParser(matchVersion, fields[i], analyzer);
+                Query q = qp.Parse(query);
+                if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).GetClauses().Count > 0))
+                {
+                    bQuery.Add(q, flags[i]);
+                }
+            }
+            return bQuery;
+        }
+
+        /// <summary> 
+        /// Parses a query, searching on the fields specified. Use this if you need
+        /// to specify certain fields as required, and others as prohibited.
+        /// <p/>
+        /// Usage:
+        /// <code>
+        /// String[] query = {&quot;query1&quot;, &quot;query2&quot;, &quot;query3&quot;};
+        /// String[] fields = {&quot;filename&quot;, &quot;contents&quot;, &quot;description&quot;};
+        /// BooleanClause.Occur[] flags = {BooleanClause.Occur.SHOULD,
+        /// BooleanClause.Occur.MUST,
+        /// BooleanClause.Occur.MUST_NOT};
+        /// MultiFieldQueryParser.parse(query, fields, flags, analyzer);
+        /// </code>
+        /// <p/>
+        /// The code above would construct a query:
+        /// 
+        /// <code>
+        /// (filename:query1) +(contents:query2) -(description:query3)
+        /// </code>
+        /// 
+        /// </summary>
+        /// <param name="matchVersion">Lucene version to match; this is passed through to
+        /// QueryParser.
+        /// </param>
+        /// <param name="queries">Queries string to parse
+        /// </param>
+        /// <param name="fields">Fields to search on
+        /// </param>
+        /// <param name="flags">Flags describing the fields
+        /// </param>
+        /// <param name="analyzer">Analyzer to use
+        /// </param>
+        /// <throws>  ParseException </throws>
+        /// <summary>             if query parsing fails
+        /// </summary>
+        /// <throws>  IllegalArgumentException </throws>
+        /// <summary>             if the length of the queries, fields, and flags array differ
+        /// </summary>
+        public static Query Parse(LuceneVersion matchVersion, string[] queries, string[] fields, BooleanClause.Occur[] flags, Analyzer analyzer)
+        {
+            if (!(queries.Length == fields.Length && queries.Length == flags.Length))
+                throw new System.ArgumentException("queries, fields, and flags array have have different length");
+            BooleanQuery bQuery = new BooleanQuery();
+            for (int i = 0; i < fields.Length; i++)
+            {
+                QueryParser qp = new QueryParser(matchVersion, fields[i], analyzer);
+                Query q = qp.Parse(queries[i]);
+                if (q != null && (!(q is BooleanQuery) || ((BooleanQuery)q).GetClauses().Count > 0))
+                {
+                    bQuery.Add(q, flags[i]);
+                }
+            }
+            return bQuery;
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/ParseException.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/ParseException.cs b/src/Lucene.Net.QueryParser/Classic/ParseException.cs
new file mode 100644
index 0000000..161fa95
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/ParseException.cs
@@ -0,0 +1,235 @@
+using System;
+using System.Text;
+using Lucene.Net.Support;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+	/// <summary> 
+    /// This exception is thrown when parse errors are encountered.
+	/// You can explicitly create objects of this exception type by
+	/// calling the method GenerateParseException in the generated
+	/// parser.
+	/// 
+	/// You can modify this class to customize your error reporting
+	/// mechanisms so long as you retain the public fields.
+	/// </summary>
+	[Serializable]
+	public class ParseException : Exception
+	{
+        /// <summary>
+        /// This constructor is used by the method "GenerateParseException"
+        /// in the generated parser.  Calling this constructor generates
+        /// a new object of this type with the fields "currentToken",
+        /// "expectedTokenSequences", and "tokenImage" set.
+        /// </summary>
+        /// <param name="currentTokenVal"></param>
+        /// <param name="expectedTokenSequencesVal"></param>
+        /// <param name="tokenImageVal"></param>
+        public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        string[] tokenImageVal)
+            : base(Initialize(currentTokenVal, expectedTokenSequencesVal, tokenImageVal))
+        {
+            currentToken = currentTokenVal;
+            expectedTokenSequences = expectedTokenSequencesVal;
+            tokenImage = tokenImageVal;
+        }
+
+        /**
+         * The following constructors are for use by you for whatever
+         * purpose you can think of.  Constructing the exception in this
+         * manner makes the exception behave in the normal way - i.e., as
+         * documented in the class "Throwable".  The fields "errorToken",
+         * "expectedTokenSequences", and "tokenImage" do not contain
+         * relevant information.  The JavaCC generated code does not use
+         * these constructors.
+         */
+
+        public ParseException()
+        { }
+
+        public ParseException(string message)
+            : base(message)
+        { }
+
+        public ParseException(string message, Exception innerException)
+            : base(message, innerException)
+        { }
+
+
+        /// <summary> 
+        /// This is the last token that has been consumed successfully.  If
+        /// this object has been created due to a parse error, the token
+        /// following this token will (therefore) be the first error token.
+        /// </summary>
+        public Token currentToken;
+
+        /// <summary> 
+        /// Each entry in this array is an array of integers.  Each array
+        /// of integers represents a sequence of tokens (by their ordinal
+        /// values) that is expected at this point of the parse.
+        /// </summary>
+        public int[][] expectedTokenSequences;
+
+        /// <summary> 
+        /// This is a reference to the "tokenImage" array of the generated
+        /// parser within which the parse error occurred.  This array is
+        /// defined in the generated ...Constants interface.
+        /// </summary>
+        public string[] tokenImage;
+
+
+        /// <summary>
+        /// It uses "currentToken" and "expectedTokenSequences" to generate a parse
+        /// error message and returns it.  If this object has been created
+        /// due to a parse error, and you do not catch it (it gets thrown
+        /// from the parser) the correct error message
+        /// gets displayed.
+        /// </summary>
+        /// <param name="currentToken"></param>
+        /// <param name="expectedTokenSequences"></param>
+        /// <param name="tokenImage"></param>
+        /// <returns></returns>
+        private static string Initialize(Token currentToken,
+            int[][] expectedTokenSequences,
+            string[] tokenImage)
+        {
+
+            StringBuilder expected = new StringBuilder();
+            int maxSize = 0;
+            for (int i = 0; i < expectedTokenSequences.Length; i++)
+            {
+                if (maxSize < expectedTokenSequences[i].Length)
+                {
+                    maxSize = expectedTokenSequences[i].Length;
+                }
+                for (int j = 0; j < expectedTokenSequences[i].Length; j++)
+                {
+                    expected.Append(tokenImage[expectedTokenSequences[i][j]]).Append(' ');
+                }
+                if (expectedTokenSequences[i][expectedTokenSequences[i].Length - 1] != 0)
+                {
+                    expected.Append("...");
+                }
+                expected.Append(eol).Append("    ");
+            }
+            string retval = "Encountered \"";
+            Token tok = currentToken.next;
+            for (int i = 0; i < maxSize; i++)
+            {
+                if (i != 0)
+                    retval += " ";
+                if (tok.kind == 0)
+                {
+                    retval += tokenImage[0];
+                    break;
+                }
+                retval += (" " + tokenImage[tok.kind]);
+                retval += " \"";
+                retval += Add_escapes(tok.image);
+                retval += " \"";
+                tok = tok.next;
+            }
+            retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
+            retval += ("." + eol);
+            if (expectedTokenSequences.Length == 1)
+            {
+                retval += ("Was expecting:" + eol + "    ");
+            }
+            else
+            {
+                retval += ("Was expecting one of:" + eol + "    ");
+            }
+            retval += expected.ToString();
+            return retval;
+        }
+		
+		/// <summary> 
+        /// The end of line string for this machine.
+        /// </summary>
+		protected static string eol = Environment.NewLine;
+		
+		/// <summary> 
+        /// Used to convert raw characters to their escaped version
+		/// when these raw versions cannot be used as part of an ASCII
+		/// string literal.
+		/// </summary>
+		internal static string Add_escapes(string str)
+		{
+			StringBuilder retval = new StringBuilder();
+			char ch;
+			for (int i = 0; i < str.Length; i++)
+			{
+				switch (str[i])
+				{
+					
+					case (char) (0): 
+						continue;
+					
+					case '\b': 
+						retval.Append("\\b");
+						continue;
+					
+					case '\t': 
+						retval.Append("\\t");
+						continue;
+					
+					case '\n': 
+						retval.Append("\\n");
+						continue;
+					
+					case '\f': 
+						retval.Append("\\f");
+						continue;
+					
+					case '\r': 
+						retval.Append("\\r");
+						continue;
+					
+					case '\"': 
+						retval.Append("\\\"");
+						continue;
+					
+					case '\'': 
+						retval.Append("\\\'");
+						continue;
+					
+					case '\\': 
+						retval.Append("\\\\");
+						continue;
+					
+					default: 
+						if ((ch = str[i]) < 0x20 || ch > 0x7e)
+						{
+							System.String s = "0000" + System.Convert.ToString(ch, 16);
+							retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
+						}
+						else
+						{
+							retval.Append(ch);
+						}
+						continue;
+					
+				}
+			}
+			return retval.ToString();
+		}
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
new file mode 100644
index 0000000..e86c716
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
@@ -0,0 +1,921 @@
+using Lucene.Net.Analysis;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary> This class is generated by JavaCC.  The most important method is
+    /// <see cref="Parse(String)" />.
+    /// 
+    /// The syntax for query strings is as follows:
+    /// A Query is a series of clauses.
+    /// A clause may be prefixed by:
+    /// <list type="bullet">
+    /// <item> a plus (<c>+</c>) or a minus (<c>-</c>) sign, indicating
+    /// that the clause is required or prohibited respectively; or</item>
+    /// <item> a term followed by a colon, indicating the field to be searched.
+    /// This enables one to construct queries which search multiple fields.</item>
+    /// </list>
+    /// 
+    /// A clause may be either:
+    /// <list type="bullet">
+    /// <item> a term, indicating all the documents that contain this term; or</item>
+    /// <item> a nested query, enclosed in parentheses.  Note that this may be used
+    /// with a <c>+</c>/<c>-</c> prefix to require any of a set of
+    /// terms.</item>
+    /// </list>
+    /// 
+    /// Thus, in BNF, the query grammar is:
+    /// <code>
+    /// Query  ::= ( Clause )*
+    /// Clause ::= ["+", "-"] [&lt;TERM&gt; ":"] ( &lt;TERM&gt; | "(" Query ")" )
+    /// </code>
+    /// 
+    /// <p/>
+    /// Examples of appropriately formatted queries can be found in the <a
+    /// href="../../../../../../queryparsersyntax.html">query syntax
+    /// documentation</a>.
+    /// <p/>
+    /// 
+    /// <p/>
+    /// In <see cref="TermRangeQuery" />s, QueryParser tries to detect date values, e.g.
+    /// <tt>date:[6/1/2005 TO 6/4/2005]</tt> produces a range query that searches
+    /// for "date" fields between 2005-06-01 and 2005-06-04. Note that the format
+    /// of the accepted input depends on the <see cref="Locale" />.
+    /// A <see cref="Lucene.Net.Documents.DateTools.Resolution" /> has to be set,
+    /// if you want to use <see cref="DateTools"/> for date conversion.<p/>
+    /// <p/>
+    /// The date resolution that shall be used for RangeQueries can be set
+    /// using <see cref="SetDateResolution(DateTools.Resolution)" />
+    /// or <see cref="SetDateResolution(String, DateTools.Resolution)" />. The former
+    /// sets the default date resolution for all fields, whereas the latter can
+    /// be used to set field specific date resolutions. Field specific date
+    /// resolutions take, if set, precedence over the default date resolution.
+    /// <p/>
+    /// <p/>
+    /// If you don't use <see cref="DateTools" /> in your index, you can create your own
+    /// query parser that inherits QueryParser and overwrites
+    /// <see cref="GetRangeQuery(String, String, String, bool)" /> to
+    /// use a different method for date conversion.
+    /// <p/>
+    /// 
+    /// <p/>Note that QueryParser is <em>not</em> thread-safe.<p/> 
+    /// 
+    /// <p/><b>NOTE</b>: there is a new QueryParser in contrib, which matches
+    /// the same syntax as this class, but is more modular,
+    /// enabling substantial customization to how a query is created.
+    /// 
+    /// <b>NOTE</b>: You must specify the required <see cref="LuceneVersion" /> compatibility when
+    /// creating QueryParser:
+    /// <list type="bullet">
+    /// <item>As of 3.1, <see cref="AutoGeneratePhraseQueries"/> is false by default.</item>
+    /// </list>
+    /// </summary>
+    public class QueryParser : QueryParserBase
+    {
+        // NOTE: This was moved into the QueryParserBase class.
+
+        ///* The default operator_Renamed for parsing queries. 
+        // * Use {@link QueryParser#setDefaultOperator} to change it.
+        // */
+
+        //public enum Operator
+        //{
+        //    OR,
+        //    AND
+        //}
+
+        /// <summary>
+        /// Constructs a query parser.
+        /// </summary>
+        /// <param name="matchVersion">Lucene version to match.</param>
+        /// <param name="f">the default field for query terms.</param>
+        /// <param name="a">used to find terms in the query text.</param>
+        public QueryParser(LuceneVersion matchVersion, string f, Analyzer a)
+            : this(new FastCharStream(new StringReader("")))
+        {
+            Init(matchVersion, f, a);
+        }
+
+        // *   Query  ::= ( Clause )*
+        // *   Clause ::= ["+", "-"] [<TermToken> ":"] ( <TermToken> | "(" Query ")" )
+        public int Conjunction()
+        {
+            int ret = CONJ_NONE;
+            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+            {
+                case RegexpToken.AND:
+                case RegexpToken.OR:
+                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                    {
+                        case RegexpToken.AND:
+                            Jj_consume_token(RegexpToken.AND);
+                            ret = CONJ_AND;
+                            break;
+                        case RegexpToken.OR:
+                            Jj_consume_token(RegexpToken.OR);
+                            ret = CONJ_OR;
+                            break;
+                        default:
+                            jj_la1[0] = jj_gen;
+                            Jj_consume_token(-1);
+                            throw new ParseException();
+                    }
+                    break;
+                default:
+                    jj_la1[1] = jj_gen;
+                    break;
+            }
+            {
+                if (true) return ret;
+            }
+            throw new ApplicationException("Missing return statement in function");
+        }
+
        /// <summary>
        /// Parses an optional modifier (<c>+</c>, <c>-</c>, or <c>NOT</c>)
        /// ahead of the next clause.
        /// </summary>
        /// <returns><c>MOD_REQ</c> for <c>+</c>, <c>MOD_NOT</c> for <c>-</c>
        /// or <c>NOT</c>, otherwise <c>MOD_NONE</c>.</returns>
        public int Modifiers()
        {
            int ret = MOD_NONE;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.NOT:
                case RegexpToken.PLUS:
                case RegexpToken.MINUS:
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.PLUS:
                            Jj_consume_token(RegexpToken.PLUS);
                            ret = MOD_REQ;
                            break;
                        case RegexpToken.MINUS:
                            Jj_consume_token(RegexpToken.MINUS);
                            ret = MOD_NOT;
                            break;
                        case RegexpToken.NOT:
                            Jj_consume_token(RegexpToken.NOT);
                            ret = MOD_NOT;
                            break;
                        default:
                            // No viable alternative: record the choice point and fail.
                            jj_la1[2] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    break;
                default:
                    jj_la1[3] = jj_gen;
                    break;
            }
            {
                if (true) return ret;
            }
            // Unreachable; emitted by the JavaCC code generator.
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Parses a complete query for the given default field and then
        /// consumes the EOF token, so trailing garbage after the query string
        /// raises a <see cref="ParseException"/>.
        /// </summary>
        public override Query TopLevelQuery(string field)
        {
            Query q;
            q = Query(field);
            Jj_consume_token(0); // token kind 0 is EOF
            {
                if (true) return q;
            }
            // Unreachable; emitted by the JavaCC code generator.
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Grammar production <c>Query ::= Modifiers Clause ( Conjunction Modifiers Clause )*</c>:
        /// parses a whole (sub-)query for the given default field.
        /// </summary>
        public Query Query(string field)
        {
            List<BooleanClause> clauses = new List<BooleanClause>();
            Query q, firstQuery = null;
            int conj, mods;
            mods = Modifiers();
            q = Clause(field);
            AddClause(clauses, CONJ_NONE, mods, q);
            if (mods == MOD_NONE)
                firstQuery = q;
            while (true)
            {
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    // Any token that can start another clause keeps the loop going.
                    case RegexpToken.AND:
                    case RegexpToken.OR:
                    case RegexpToken.NOT:
                    case RegexpToken.PLUS:
                    case RegexpToken.MINUS:
                    case RegexpToken.BAREOPER:
                    case RegexpToken.LPAREN:
                    case RegexpToken.STAR:
                    case RegexpToken.QUOTED:
                    case RegexpToken.TERM:
                    case RegexpToken.PREFIXTERM:
                    case RegexpToken.WILDTERM:
                    case RegexpToken.REGEXPTERM:
                    case RegexpToken.RANGEIN_START:
                    case RegexpToken.RANGEEX_START:
                    case RegexpToken.NUMBER:
                        break;
                    default:
                        // No further clause: leave the loop (replaces Java's
                        // labeled break from the generated code).
                        jj_la1[4] = jj_gen;
                        goto label_1;
                }

                conj = Conjunction();
                mods = Modifiers();
                q = Clause(field);
                AddClause(clauses, conj, mods, q);
            }

        label_1:

            // A single unmodified clause is returned as-is rather than being
            // wrapped in a boolean query.
            if (clauses.Count == 1 && firstQuery != null)
            {
                if (true) return firstQuery;
            }

            return GetBooleanQuery(clauses);
        }
+
        /// <summary>
        /// Grammar production
        /// <c>Clause ::= [ &lt;TERM&gt; ":" | "*" ":" ] ( Term | "(" Query ")" [ "^" &lt;NUMBER&gt; ] )</c>:
        /// parses one clause, honoring an optional field override and an
        /// optional boost on parenthesized sub-queries.
        /// </summary>
        public Query Clause(string field)
        {
            Query q;
            Token fieldToken = null, boost = null;
            if (Jj_2_1(2))
            {
                // Two-token syntactic lookahead saw a field prefix
                // ("term:" or "*:"), so consume it and switch fields.
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.TERM:
                        fieldToken = Jj_consume_token(RegexpToken.TERM);
                        Jj_consume_token(RegexpToken.COLON);
                        field = DiscardEscapeChar(fieldToken.image);
                        break;
                    case RegexpToken.STAR:
                        Jj_consume_token(RegexpToken.STAR);
                        Jj_consume_token(RegexpToken.COLON);
                        field = "*";
                        break;
                    default:
                        jj_la1[5] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
            }
            else
            {
                ;
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.BAREOPER:
                case RegexpToken.STAR:
                case RegexpToken.QUOTED:
                case RegexpToken.TERM:
                case RegexpToken.PREFIXTERM:
                case RegexpToken.WILDTERM:
                case RegexpToken.REGEXPTERM:
                case RegexpToken.RANGEIN_START:
                case RegexpToken.RANGEEX_START:
                case RegexpToken.NUMBER:
                    q = Term(field);
                    break;
                case RegexpToken.LPAREN:
                    // Parenthesized sub-query, optionally boosted with ^<NUMBER>.
                    Jj_consume_token(RegexpToken.LPAREN);
                    q = Query(field);
                    Jj_consume_token(RegexpToken.RPAREN);
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.CARAT:
                            Jj_consume_token(RegexpToken.CARAT);
                            boost = Jj_consume_token(RegexpToken.NUMBER);
                            break;
                        default:
                            jj_la1[6] = jj_gen;
                            break;
                    }
                    break;
                default:
                    jj_la1[7] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            {
                if (true) return HandleBoost(q, boost);
            }
            // Unreachable; emitted by the JavaCC code generator.
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Grammar production for a single term-level construct: a bare term
        /// (possibly prefix/wildcard/regexp/fuzzy, with optional boost), a
        /// range query (<c>[a TO b]</c> / <c>{a TO b}</c>), or a quoted phrase.
        /// </summary>
        public Query Term(String field)
        {
            Token term, boost = null, fuzzySlop = null, goop1, goop2;
            bool prefix = false;
            bool wildcard = false;
            bool fuzzy = false;
            bool regexp = false;
            bool startInc = false;
            bool endInc = false;
            Query q;
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                // --- Bare term / wildcard / prefix / regexp / number ---
                case RegexpToken.BAREOPER:
                case RegexpToken.STAR:
                case RegexpToken.TERM:
                case RegexpToken.PREFIXTERM:
                case RegexpToken.WILDTERM:
                case RegexpToken.REGEXPTERM:
                case RegexpToken.NUMBER:
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.TERM:
                            term = Jj_consume_token(RegexpToken.TERM);
                            break;
                        case RegexpToken.STAR:
                            term = Jj_consume_token(RegexpToken.STAR);
                            wildcard = true;
                            break;
                        case RegexpToken.PREFIXTERM:
                            term = Jj_consume_token(RegexpToken.PREFIXTERM);
                            prefix = true;
                            break;
                        case RegexpToken.WILDTERM:
                            term = Jj_consume_token(RegexpToken.WILDTERM);
                            wildcard = true;
                            break;
                        case RegexpToken.REGEXPTERM:
                            term = Jj_consume_token(RegexpToken.REGEXPTERM);
                            regexp = true;
                            break;
                        case RegexpToken.NUMBER:
                            term = Jj_consume_token(RegexpToken.NUMBER);
                            break;
                        case RegexpToken.BAREOPER:
                            term = Jj_consume_token(RegexpToken.BAREOPER);
                            // A bare operator used as a term: keep only its
                            // first character.
                            term.image = term.image.Substring(0, 1);
                            break;
                        default:
                            jj_la1[8] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    // Optional fuzzy slop (~n), possibly before and/or after a boost.
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.FUZZY_SLOP:
                            fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                            fuzzy = true;
                            break;
                        default:
                            jj_la1[9] = jj_gen;
                            break;
                    }
                    // Optional boost (^n), itself optionally followed by fuzzy slop.
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.CARAT:
                            Jj_consume_token(RegexpToken.CARAT);
                            boost = Jj_consume_token(RegexpToken.NUMBER);
                            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                            {
                                case RegexpToken.FUZZY_SLOP:
                                    fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                                    fuzzy = true;
                                    break;
                                default:
                                    jj_la1[10] = jj_gen;
                                    break;
                            }
                            break;
                        default:
                            jj_la1[11] = jj_gen;
                            break;
                    }
                    q = HandleBareTokenQuery(field, term, fuzzySlop, prefix, wildcard, fuzzy, regexp);
                    break;
                // --- Range query: [goop1 TO goop2] or {goop1 TO goop2} ---
                case RegexpToken.RANGEIN_START:
                case RegexpToken.RANGEEX_START:
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.RANGEIN_START:
                            Jj_consume_token(RegexpToken.RANGEIN_START);
                            startInc = true;
                            break;
                        case RegexpToken.RANGEEX_START:
                            Jj_consume_token(RegexpToken.RANGEEX_START);
                            break;
                        default:
                            jj_la1[12] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.RANGE_GOOP:
                            goop1 = Jj_consume_token(RegexpToken.RANGE_GOOP);
                            break;
                        case RegexpToken.RANGE_QUOTED:
                            goop1 = Jj_consume_token(RegexpToken.RANGE_QUOTED);
                            break;
                        default:
                            jj_la1[13] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    // The "TO" keyword between the endpoints is optional.
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.RANGE_TO:
                            Jj_consume_token(RegexpToken.RANGE_TO);
                            break;
                        default:
                            jj_la1[14] = jj_gen;
                            break;
                    }
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.RANGE_GOOP:
                            goop2 = Jj_consume_token(RegexpToken.RANGE_GOOP);
                            break;
                        case RegexpToken.RANGE_QUOTED:
                            goop2 = Jj_consume_token(RegexpToken.RANGE_QUOTED);
                            break;
                        default:
                            jj_la1[15] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.RANGEIN_END:
                            Jj_consume_token(RegexpToken.RANGEIN_END);
                            endInc = true;
                            break;
                        case RegexpToken.RANGEEX_END:
                            Jj_consume_token(RegexpToken.RANGEEX_END);
                            break;
                        default:
                            jj_la1[16] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.CARAT:
                            Jj_consume_token(RegexpToken.CARAT);
                            boost = Jj_consume_token(RegexpToken.NUMBER);
                            break;
                        default:
                            jj_la1[17] = jj_gen;
                            break;
                    }
                    bool startOpen = false;
                    bool endOpen = false;
                    // Quoted endpoints lose their surrounding quotes; a bare "*"
                    // endpoint means an open-ended range.
                    if (goop1.kind == RegexpToken.RANGE_QUOTED)
                    {
                        goop1.image = goop1.image.Substring(1, goop1.image.Length - 2);
                    }
                    else if ("*".Equals(goop1.image))
                    {
                        startOpen = true;
                    }
                    if (goop2.kind == RegexpToken.RANGE_QUOTED)
                    {
                        goop2.image = goop2.image.Substring(1, goop2.image.Length - 2);
                    }
                    else if ("*".Equals(goop2.image))
                    {
                        endOpen = true;
                    }
                    q = GetRangeQuery(field, startOpen ? null : DiscardEscapeChar(goop1.image), endOpen ? null : DiscardEscapeChar(goop2.image), startInc, endInc);
                    break;
                // --- Quoted phrase, with optional slop (~n) and boost (^n) ---
                case RegexpToken.QUOTED:
                    term = Jj_consume_token(RegexpToken.QUOTED);
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.FUZZY_SLOP:
                            fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                            break;
                        default:
                            jj_la1[18] = jj_gen;
                            break;
                    }
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.CARAT:
                            Jj_consume_token(RegexpToken.CARAT);
                            boost = Jj_consume_token(RegexpToken.NUMBER);
                            break;
                        default:
                            jj_la1[19] = jj_gen;
                            break;
                    }
                    q = HandleQuotedTerm(field, term, fuzzySlop);
                    break;
                default:
                    jj_la1[20] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            { if (true) return HandleBoost(q, boost); }
            // Unreachable; emitted by the JavaCC code generator.
            throw new Exception("Missing return statement in function");
        }
+
        /// <summary>
        /// Driver for syntactic lookahead choice 1 (the field prefix in
        /// Clause): returns true when the next <paramref name="xla"/> tokens
        /// match the lookahead grammar.
        /// </summary>
        private bool Jj_2_1(int xla)
        {
            jj_la = xla;
            jj_lastpos = jj_scanpos = token;
            try
            {
                // Jj_3_1 returns true on scan *failure* (JavaCC convention).
                return !Jj_3_1();
            }
            catch (LookaheadSuccess)
            {
                // Thrown by Jj_scan_token when the lookahead budget is used up
                // without a mismatch, i.e. the lookahead succeeded.
                return true;
            }
            finally
            {
                // Record the attempt so Jj_rescan_token can replay it for
                // error reporting.
                Jj_save(0, xla);
            }
        }
+
+        private bool Jj_3R_2()
+        {
+            if (Jj_scan_token(RegexpToken.TERM)) return true;
+            if (Jj_scan_token(RegexpToken.COLON)) return true;
+            return false;
+        }
+
        /// <summary>
        /// Lookahead for choice 1: either <c>&lt;TERM&gt; ":"</c> or
        /// <c>"*" ":"</c>. Returns true on scan failure (JavaCC convention).
        /// </summary>
        private bool Jj_3_1()
        {
            Token xsp;
            // Remember the scan position so the second alternative can be
            // tried from the same point if the first one fails.
            xsp = jj_scanpos;
            if (Jj_3R_2())
            {
                jj_scanpos = xsp;
                if (Jj_3R_3()) return true;
            }
            return false;
        }
+
+        private bool Jj_3R_3()
+        {
+            if (Jj_scan_token(RegexpToken.STAR)) return true;
+            if (Jj_scan_token(RegexpToken.COLON)) return true;
+            return false;
+        }
+
        /// <summary>Generated Token Manager.</summary>
        public QueryParserTokenManager token_source;
        /// <summary>Current token.</summary>
        public Token token;
        /// <summary>Next token.</summary>
        public Token jj_nt;
        private int jj_ntk;                    // cached kind of the next token, or -1 if not fetched yet
        private Token jj_scanpos, jj_lastpos;  // current / last position during syntactic lookahead
        private int jj_la;                     // remaining lookahead budget for the current scan
        private int jj_gen;                    // generation counter, bumped on each consumed token
        private int[] jj_la1 = new int[21];    // per-choice-point generation, used for error reporting
        private static uint[] jj_la1_0;        // expected-token bitmasks for token kinds 0-31
        private static int[] jj_la1_1;         // expected-token bitmasks for token kinds 32+
+
+        static QueryParser()
+        {
+            {
+                Jj_la1_init_0();
+                Jj_la1_init_1();
+            }
+        }
+
        /// <summary>
        /// Initializes the bitmask table for token kinds 0-31: for each of the
        /// 21 parser choice points, a bit is set for every token kind that is
        /// legal there (consumed by GenerateParseException). Values are
        /// emitted by the JavaCC code generator; do not edit by hand.
        /// </summary>
        private static void Jj_la1_init_0()
        {
            jj_la1_0 = new uint[] 
            { 
                0x300, 0x300, 0x1c00, 0x1c00, 0xfda7f00, 0x120000, 0x40000, 0xfda6000, 0x9d22000, 0x200000, 
                0x200000, 0x40000, 0x6000000, 0x80000000, 0x10000000, 0x80000000, 0x60000000, 0x40000, 
                0x200000, 0x40000, 0xfda2000, 
            };
        }
+
        /// <summary>
        /// Initializes the bitmask table for token kinds 32 and above
        /// (companion to Jj_la1_init_0). Values are emitted by the JavaCC
        /// code generator; do not edit by hand.
        /// </summary>
        private static void Jj_la1_init_1()
        {
            jj_la1_1 = new int[] 
            { 
                0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 
                0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 
            };
        }
+
        private JJCalls[] jj_2_rtns = new JJCalls[1];  // one JJCalls chain per syntactic-lookahead choice point
        private bool jj_rescan = false;                // true while Jj_rescan_token replays lookaheads
        private int jj_gc = 0;                         // counter driving periodic JJCalls cleanup in Jj_consume_token
+
+        /// <summary>Constructor with user supplied CharStream. </summary>
+        protected internal QueryParser(ICharStream stream)
+        {
+            token_source = new QueryParserTokenManager(stream);
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>Reinitialise. </summary>
+        public override void ReInit(ICharStream stream)
+        {
+            token_source.ReInit(stream);
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>Constructor with generated Token Manager. </summary>
+        protected QueryParser(QueryParserTokenManager tm)
+        {
+            token_source = tm;
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>Reinitialise. </summary>
+        public void ReInit(QueryParserTokenManager tm)
+        {
+            token_source = tm;
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
        /// <summary>
        /// Consumes the next token, which must be of the given kind; on a
        /// mismatch the position is restored and a ParseException is thrown.
        /// </summary>
        private Token Jj_consume_token(int kind)
        {
            Token oldToken;
            if ((oldToken = token).next != null) token = token.next;
            else token = token.next = token_source.GetNextToken();
            jj_ntk = -1;
            if (token.kind == kind)
            {
                jj_gen++;
                if (++jj_gc > 100)
                {
                    // Every 100 consumed tokens, drop stale lookahead records
                    // so the token chain they reference can be collected.
                    jj_gc = 0;
                    for (int i = 0; i < jj_2_rtns.Length; i++)
                    {
                        JJCalls c = jj_2_rtns[i];
                        while (c != null)
                        {
                            if (c.gen < jj_gen) c.first = null;
                            c = c.next;
                        }
                    }
                }
                return token;
            }
            // Mismatch: rewind to the previous token and report the expected kind.
            token = oldToken;
            jj_kind = kind;
            throw GenerateParseException();
        }
+
        /// <summary>
        /// Control-flow exception thrown by Jj_scan_token to signal that a
        /// syntactic lookahead fully matched; caught in Jj_2_1 and
        /// Jj_rescan_token.
        /// </summary>
        [Serializable]
        private sealed class LookaheadSuccess : Exception
        {
        }
+
        // Singleton instance thrown to abort a successful lookahead scan early.
        private LookaheadSuccess jj_ls = new LookaheadSuccess();

        /// <summary>
        /// Scans one token of the given kind during syntactic lookahead.
        /// Returns true on mismatch; throws LookaheadSuccess when the
        /// lookahead budget is exhausted without a mismatch.
        /// </summary>
        private bool Jj_scan_token(int kind)
        {
            if (jj_scanpos == jj_lastpos)
            {
                // At the frontier of already-fetched tokens: spend budget and
                // extend the token chain if necessary.
                jj_la--;
                if (jj_scanpos.next == null)
                {
                    jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
                }
                else
                {
                    jj_lastpos = jj_scanpos = jj_scanpos.next;
                }
            }
            else
            {
                jj_scanpos = jj_scanpos.next;
            }
            if (jj_rescan)
            {
                // During error-reporting rescan, record where this scan happened.
                int i = 0;
                Token tok = token;
                while (tok != null && tok != jj_scanpos)
                {
                    i++;
                    tok = tok.next;
                }
                if (tok != null) Jj_add_error_token(kind, i);
            }
            if (jj_scanpos.kind != kind) return true;
            if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
            return false;
        }
+
+        /// <summary>Get the next Token. </summary>
+        public Token GetNextToken()
+        {
+            if (token.next != null) token = token.next;
+            else token = token.next = token_source.GetNextToken();
+            jj_ntk = -1;
+            jj_gen++;
+            return token;
+        }
+
+        /// <summary>Get the specific Token. </summary>
+        public Token GetToken(int index)
+        {
+            Token t = token;
+            for (int i = 0; i < index; i++)
+            {
+                if (t.next != null) t = t.next;
+                else t = t.next = token_source.GetNextToken();
+            }
+            return t;
+        }
+
        /// <summary>
        /// Returns (and caches in jj_ntk) the kind of the token following the
        /// current one, fetching it from the token manager if necessary.
        /// </summary>
        private int Jj_ntk()
        {
            if ((jj_nt = token.next) == null)
                return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
            else
                return (jj_ntk = jj_nt.kind);
        }
+
        private List<int[]> jj_expentries = new List<int[]>();  // expected-token sequences gathered for a ParseException
        private int[] jj_expentry;                              // expected-token sequence currently being assembled
        private int jj_kind = -1;                               // token kind that failed in Jj_consume_token, or -1
        private int[] jj_lasttokens = new int[100];             // token kinds recorded during lookahead rescan
        private int jj_endpos;                                  // fill level of jj_lasttokens
+
+        private void Jj_add_error_token(int kind, int pos)
+        {
+            if (pos >= 100) return;
+            if (pos == jj_endpos + 1)
+            {
+                jj_lasttokens[jj_endpos++] = kind;
+            }
+            else if (jj_endpos != 0)
+            {
+                jj_expentry = new int[jj_endpos];
+                for (int i = 0; i < jj_endpos; i++)
+                {
+                    jj_expentry[i] = jj_lasttokens[i];
+                }
+
+                foreach (var oldentry in jj_expentries)
+                {
+                    if (oldentry.Length == jj_expentry.Length)
+                    {
+                        for (int i = 0; i < jj_expentry.Length; i++)
+                        {
+                            if (oldentry[i] != jj_expentry[i])
+                            {
+                                continue;
+                            }
+                        }
+                        jj_expentries.Add(jj_expentry);
+                        break;
+                    }
+                }
+                if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
+            }
+        }
+
+        /// <summary>Generate ParseException. </summary>
+        public virtual ParseException GenerateParseException()
+        {
+            jj_expentries.Clear();
+            bool[] la1tokens = new bool[33];
+            if (jj_kind >= 0)
+            {
+                la1tokens[jj_kind] = true;
+                jj_kind = -1;
+            }
+            for (int i = 0; i < 21; i++)
+            {
+                if (jj_la1[i] == jj_gen)
+                {
+                    for (int j = 0; j < 32; j++)
+                    {
+                        if ((jj_la1_0[i] & (1 << j)) != 0)
+                        {
+                            la1tokens[j] = true;
+                        }
+                        if ((jj_la1_1[i] & (1 << j)) != 0)
+                        {
+                            la1tokens[32 + j] = true;
+                        }
+                    }
+                }
+            }
+            for (int i = 0; i < 33; i++)
+            {
+                if (la1tokens[i])
+                {
+                    jj_expentry = new int[1];
+                    jj_expentry[0] = i;
+                    jj_expentries.Add(jj_expentry);
+                }
+            }
+            jj_endpos = 0;
+            Jj_rescan_token();
+            Jj_add_error_token(0, 0);
+            int[][] exptokseq = new int[jj_expentries.Count][];
+            for (int i = 0; i < jj_expentries.Count; i++)
+            {
+                exptokseq[i] = jj_expentries[i];
+            }
+            return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
+        }
+
+        /// <summary>Enable tracing. </summary>
+        public void Enable_tracing()
+        {
+        }
+
+        /// <summary>Disable tracing. </summary>
+        public void Disable_tracing()
+        {
+        }
+
        /// <summary>
        /// Replays recorded syntactic-lookahead attempts (with jj_rescan set)
        /// so the tokens they examined are folded into the expected-token
        /// sets of a ParseException.
        /// </summary>
        private void Jj_rescan_token()
        {
            jj_rescan = true;
            for (int i = 0; i < 1; i++)
            {
                try
                {
                    JJCalls p = jj_2_rtns[i];
                    do
                    {
                        if (p.gen > jj_gen)
                        {
                            // This recorded lookahead is still current: replay it.
                            jj_la = p.arg;
                            jj_lastpos = jj_scanpos = p.first;
                            switch (i)
                            {
                                case 0:
                                    Jj_3_1();
                                    break;
                            }
                        }
                        p = p.next;
                    } while (p != null);
                }
                catch (LookaheadSuccess)
                {
                    // A lookahead succeeding during rescan is expected; ignore.
                }
            }
            jj_rescan = false;
        }
+
        /// <summary>
        /// Records the outcome of a syntactic-lookahead attempt in the
        /// JJCalls chain for choice point <paramref name="index"/>, so it can
        /// be replayed by Jj_rescan_token.
        /// </summary>
        private void Jj_save(int index, int xla)
        {
            JJCalls p = jj_2_rtns[index];
            // Walk to the first expired entry, appending a new one if needed.
            while (p.gen > jj_gen)
            {
                if (p.next == null)
                {
                    p = p.next = new JJCalls();
                    break;
                }
                p = p.next;
            }
            p.gen = jj_gen + xla - jj_la;
            p.first = token;
            p.arg = xla;
        }
+
        /// <summary>
        /// Linked-list record of one syntactic-lookahead evaluation, kept so
        /// lookaheads can be replayed when building error messages.
        /// </summary>
        internal sealed class JJCalls
        {
            internal int gen;      // generation at which this record expires
            internal Token first;  // first token of the recorded lookahead
            internal int arg;      // the lookahead limit (xla) that was used
            internal JJCalls next; // next record in the chain, or null
        }
+    }
+}
\ No newline at end of file


[26/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
new file mode 100644
index 0000000..599110e
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
@@ -0,0 +1,1025 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.QueryParser.Flexible.Standard;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public abstract partial class QueryParserBase : QueryBuilder, ICommonQueryParserConfiguration
+    {
        /// <summary>
        /// Do not catch this exception in your code, it means you are using methods that you should no longer use.
        /// </summary>
        /// <remarks>Intentionally empty: the exception type itself is the only signal.</remarks>
        public class MethodRemovedUseAnother : Exception {}
+
+        protected const int CONJ_NONE = 0;
+        protected const int CONJ_AND = 1;
+        protected const int CONJ_OR = 2;
+
+        protected const int MOD_NONE = 0;
+        protected const int MOD_NOT = 10;
+        protected const int MOD_REQ = 11;
+
+
+        // make it possible to call setDefaultOperator() without accessing
+        // the nested class:
+        
+        /// <summary>
+        /// Alternative form of QueryParser.Operator.AND
+        /// </summary>
+        public const Operator AND_OPERATOR = Operator.AND;
+        /// <summary>
+        /// Alternative form of QueryParser.Operator.OR
+        /// </summary>
+        public const Operator OR_OPERATOR = Operator.OR;
+
+        ///// <summary>
+        ///// The actual operator that parser uses to combine query terms
+        ///// </summary>
+        //Operator operator_Renamed = OR_OPERATOR;
+
+
+        // Note: In Java, this was part of the QueryParser class. 
+        // However, in .NET we need to put it here for the constants
+        // defined above.
+
        /// <summary>
        /// The default operator for parsing queries. 
        /// Use <see cref="QueryParserBase.DefaultOperator"/> to change it.
        /// </summary>
        public enum Operator
        {
            /// <summary>Unmodified terms are optional ("capital of Hungary" = "capital OR of OR Hungary").</summary>
            OR,
            /// <summary>Unmodified terms are required ("capital of Hungary" = "capital AND of AND Hungary").</summary>
            AND
        }
+
+        //bool lowercaseExpandedTerms = true;
+        //MultiTermQuery.RewriteMethod multiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
+        //bool allowLeadingWildcard = false;
+
+        // LUCENENET-423 - DateRange differences with Java and .NET
+        private bool _useJavaStyleDateRangeParsing = false;
+
+        protected string field;
+        //int phraseSlop = 0;
+        //float fuzzyMinSim = FuzzyQuery.DefaultMinSimilarity;
+        //int fuzzyPrefixLength = FuzzyQuery.DefaultPrefixLength;
+        //CultureInfo locale = CultureInfo.CurrentCulture;
+        //TimeZoneInfo timeZone = TimeZoneInfo.Local;
+
+        // TODO: Work out what the default date resolution SHOULD be (was null in Java, which isn't valid for an enum type)
+        
+        /// <summary>
+        /// the default date resolution
+        /// </summary>
+        DateTools.Resolution dateResolution = DateTools.Resolution.DAY;
+        /// <summary>
+        ///  maps field names to date resolutions
+        /// </summary>
+        IDictionary<string, DateTools.Resolution> fieldToDateResolution = null;
+
+        /// <summary>
+        /// Whether or not to analyze range terms when constructing RangeQuerys
+        /// (For example, analyzing terms into collation keys for locale-sensitive RangeQuery)
+        /// </summary>
+        //bool analyzeRangeTerms = false;
+
        /// <summary>
        /// So the generated QueryParser(CharStream) won't error out.
        /// Sets every configuration property to its documented default;
        /// <see cref="Init"/> must still be called to supply the analyzer
        /// and default field before parsing.
        /// </summary>
        protected QueryParserBase()
            : base(null) // the analyzer is provided later via Init
        {
            // Set property defaults.
            DefaultOperator = OR_OPERATOR;
            LowercaseExpandedTerms = true;
            MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
            AllowLeadingWildcard = false;

            PhraseSlop = 0;
            FuzzyMinSim = FuzzyQuery.DefaultMinSimilarity;
            FuzzyPrefixLength = FuzzyQuery.DefaultPrefixLength;
            Locale = CultureInfo.CurrentCulture;
            TimeZone = TimeZoneInfo.Local;

            AnalyzeRangeTerms = false;
        }
+
+        /// <summary>
+        /// Initializes a query parser.  Called by the QueryParser constructor
+        /// </summary>
+        /// <param name="matchVersion">Lucene version to match.</param>
+        /// <param name="f">the default field for query terms.</param>
+        /// <param name="a">used to find terms in the query text.</param>
+        public void Init(LuceneVersion matchVersion, string f, Analyzer a)
+        {
+            Analyzer = a;
+            field = f;
+            if (matchVersion.OnOrAfter(LuceneVersion.LUCENE_31))
+            {
+                AutoGeneratePhraseQueries = false;
+            }
+            else
+            {
+                AutoGeneratePhraseQueries = true;
+            }
+        }
+
+        // the generated parser will create these in QueryParser
+        public abstract void ReInit(ICharStream stream);
+        public abstract Query TopLevelQuery(string field);
+
        /// <summary>
        /// Parses a query string, returning a <see cref="T:Query"/>.
        /// </summary>
        /// <remarks>
        /// All parser-level failures (parse errors, tokenizer errors, clause
        /// overflow) are normalized to <see cref="ParseException"/> with the
        /// offending query text included in the message.
        /// </remarks>
        /// <param name="query">the query string to be parsed.</param>
        /// <returns>the parsed query, or an empty BooleanQuery when no clauses were produced</returns>
        /// <exception cref="ParseException">if the parsing fails</exception>
        public virtual Query Parse(string query)
        {
            ReInit(new FastCharStream(new StringReader(query)));
            try
            {
                // TopLevelQuery is a Query followed by the end-of-input (EOF)
                Query res = TopLevelQuery(field);
                return res != null ? res : NewBooleanQuery(false);
            }
            catch (ParseException tme)
            {
                // rethrow to include the original query:
                throw new ParseException("Cannot parse '" + query + "': " + tme.Message, tme);
            }
            catch (TokenMgrError tme)
            {
                // tokenizer errors are surfaced as parse errors, query included:
                throw new ParseException("Cannot parse '" + query + "': " + tme.Message, tme);
            }
            catch (BooleanQuery.TooManyClauses tmc)
            {
                throw new ParseException("Cannot parse '" + query + "': too many boolean clauses", tmc);
            }
        }
+
        /// <summary>
        /// Returns the default field.
        /// </summary>
        /// <remarks>Backed by the protected <c>field</c> member assigned in <see cref="Init"/>.</remarks>
        public string Field
        {
            get { return field; }
        }
+
+        /// <summary>
+        /// Set to true if phrase queries will be automatically generated
+        /// when the analyzer returns more than one term from whitespace
+        /// delimited text.
+        /// NOTE: this behavior may not be suitable for all languages.
+        /// <p>
+        /// Set to false if phrase queries should only be generated when
+        /// surrounded by double quotes.
+        /// </summary>
+        public bool AutoGeneratePhraseQueries { get; set; }
+
+        /// <summary>
+        /// Get or Set the minimum similarity for fuzzy queries.
+        /// Default is 2f.
+        /// </summary>
+        public float FuzzyMinSim { get; set; }
+
+        /// <summary>
+        /// Get or Set the prefix length for fuzzy queries. 
+        /// Default is 0.
+        /// </summary>
+        public int FuzzyPrefixLength { get; set; }
+
+        /// <summary>
+        /// Gets or Sets the default slop for phrases. 
+        /// If zero, then exact phrase matches are required. 
+        /// Default value is zero.
+        /// </summary>
+        public int PhraseSlop { get; set; }
+
+        /// <summary>
+        /// Set to <code>true</code> to allow leading wildcard characters.
+        /// <p>
+        /// When set, <code>*</code> or <code>?</code> are allowed as
+        /// the first character of a PrefixQuery and WildcardQuery.
+        /// Note that this can produce very slow
+        /// queries on big indexes.
+        /// <p>
+        /// Default: false.
+        /// </summary>
+        public bool AllowLeadingWildcard { get; set; }
+
+        /// <summary>
+        /// Gets or Sets the boolean operator of the QueryParser.
+        /// In default mode (<code>OR_OPERATOR</code>) terms without any modifiers
+        /// are considered optional: for example <code>capital of Hungary</code> is equal to
+        /// <code>capital OR of OR Hungary</code>.<br/>
+        /// In <code>AND_OPERATOR</code> mode terms are considered to be in conjunction: the
+        /// above mentioned query is parsed as <code>capital AND of AND Hungary
+        /// </summary>
+        public Operator DefaultOperator { get; set; }
+
        /// <summary>
        /// Whether terms of wildcard, prefix, fuzzy and range queries are to be automatically
        /// lower-cased or not.  Default is <code>true</code>.
        /// </summary>
        public bool LowercaseExpandedTerms { get; set; }
+
+        /// <summary>
+        /// By default QueryParser uses <see cref="MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT"/>
+        /// when creating a <see cref="PrefixQuery"/>, <see cref="WildcardQuery"/> or <see cref="TermRangeQuery"/>. This implementation is generally preferable because it
+        /// a) Runs faster b) Does not have the scarcity of terms unduly influence score
+        /// c) avoids any <see cref="TooManyClauses"/> exception.
+        /// However, if your application really needs to use the
+        /// old-fashioned <see cref="BooleanQuery"/> expansion rewriting and the above
+        /// points are not relevant then use this to change
+        /// the rewrite method.
+        /// </summary>
+        public MultiTermQuery.RewriteMethod MultiTermRewriteMethod { get; set; }
+
+        /// <summary>
+        /// Get or Set locale used by date range parsing, lowercasing, and other
+        /// locale-sensitive operations.
+        /// </summary>
+        public CultureInfo Locale { get; set; }
+
        /// <summary>
        /// Get or Set the time zone intended for date range parsing.
        /// NOTE(review): not currently consulted in <c>GetRangeQuery</c>
        /// (see the TODO there) — confirm whether it should be applied.
        /// </summary>
        public TimeZoneInfo TimeZone { get; set; }
+
        /// <summary>
        /// Sets the default date resolution used by RangeQueries for fields for which no
        /// specific date resolution has been set. Field specific resolutions can be set
        /// with <see cref="SetDateResolution(string,DateTools.Resolution)"/>.
        /// </summary>
        /// <param name="dateResolution">the default date resolution to use</param>
        public void SetDateResolution(DateTools.Resolution dateResolution)
        {
            this.dateResolution = dateResolution;
        }
+
+        /// <summary>
+        /// Sets the date resolution used by RangeQueries for a specific field.
+        /// </summary>
+        /// <param name="fieldName">field for which the date resolution is to be set</param>
+        /// <param name="dateResolution">date resolution to set</param>
+        public void SetDateResolution(string fieldName, DateTools.Resolution dateResolution)
+        {
+            if (string.IsNullOrEmpty(fieldName))
+            {
+                throw new ArgumentNullException("fieldName cannot be null or empty string.");
+            }
+
+            if (fieldToDateResolution == null)
+            {
+                // lazily initialize Dictionary
+                fieldToDateResolution = new Dictionary<string, DateTools.Resolution>();
+            }
+
+            fieldToDateResolution[fieldName] = dateResolution;
+        }
+
+        /// <summary>
+        /// Returns the date resolution that is used by RangeQueries for the given field.
+        /// Returns null, if no default or field specific date resolution has been set 
+        /// for the given field.
+        /// </summary>
+        /// <param name="fieldName"></param>
+        /// <returns></returns>
+        public DateTools.Resolution GetDateResolution(string fieldName)
+        {
+            if (string.IsNullOrEmpty(fieldName))
+            {
+                throw new ArgumentNullException("fieldName cannot be null or empty string.");
+            }
+
+            if (fieldToDateResolution == null)
+            {
+                // no field specific date resolutions set; return default date resolution instead
+                return this.dateResolution;
+            }
+
+            if (!fieldToDateResolution.ContainsKey(fieldName))
+            {
+                // no date resolutions set for the given field; return default date resolution instead
+                return this.dateResolution;
+            }
+
+            return fieldToDateResolution[fieldName];
+        }
+
+        /// <summary>
+        /// Get or Set whether or not to analyze range terms when constructing <see cref="TermRangeQuery"/>s.
+        /// For example, setting this to true can enable analyzing terms into 
+        /// collation keys for locale-sensitive <see cref="TermRangeQuery"/>.
+        /// </summary>
+        public bool AnalyzeRangeTerms { get; set; }
+
        /// <summary>
        /// Adds <paramref name="q"/> to <paramref name="clauses"/>, deriving the
        /// occur flag from the conjunction (<paramref name="conj"/>), the modifier
        /// (<paramref name="mods"/>) and <see cref="DefaultOperator"/>. May also
        /// retroactively adjust the occur flag of the preceding clause.
        /// </summary>
        protected internal virtual void AddClause(IList<BooleanClause> clauses, int conj, int mods, Query q)
        {
            bool required, prohibited;

            // If this term is introduced by AND, make the preceding term required,
            // unless it's already prohibited
            if (clauses.Count > 0 && conj == CONJ_AND)
            {
                BooleanClause c = clauses[clauses.Count - 1];
                if (!c.Prohibited)
                    c.Occur_ = BooleanClause.Occur.MUST;
            }

            if (clauses.Count > 0 && DefaultOperator == AND_OPERATOR && conj == CONJ_OR)
            {
                // If this term is introduced by OR, make the preceding term optional,
                // unless it's prohibited (that means we leave -a OR b but +a OR b-->a OR b)
                // notice if the input is a OR b, first term is parsed as required; without
                // this modification a OR b would parsed as +a OR b
                BooleanClause c = clauses[clauses.Count - 1];
                if (!c.Prohibited)
                    c.Occur_ = BooleanClause.Occur.SHOULD;
            }

            // We might have been passed a null query; the term might have been
            // filtered away by the analyzer.
            if (q == null)
                return;

            if (DefaultOperator == OR_OPERATOR)
            {
                // We set REQUIRED if we're introduced by AND or +; PROHIBITED if
                // introduced by NOT or -; make sure not to set both.
                prohibited = (mods == MOD_NOT);
                required = (mods == MOD_REQ);
                if (conj == CONJ_AND && !prohibited)
                {
                    required = true;
                }
            }
            else
            {
                // We set PROHIBITED if we're introduced by NOT or -; We set REQUIRED
                // if not PROHIBITED and not introduced by OR
                prohibited = (mods == MOD_NOT);
                required = (!prohibited && conj != CONJ_OR);
            }
            // Map the (required, prohibited) pair onto the BooleanClause occur flag.
            if (required && !prohibited)
                clauses.Add(NewBooleanClause(q, BooleanClause.Occur.MUST));
            else if (!required && !prohibited)
                clauses.Add(NewBooleanClause(q, BooleanClause.Occur.SHOULD));
            else if (!required && prohibited)
                clauses.Add(NewBooleanClause(q, BooleanClause.Occur.MUST_NOT));
            else
                throw new Exception("Clause cannot be both required and prohibited");
        }
+
        /// <summary>
        /// Factory for a query over <paramref name="queryText"/> in
        /// <paramref name="field"/>; delegates to <see cref="NewFieldQuery"/>
        /// with the parser's configured analyzer.
        /// </summary>
        /// <param name="field">field the query applies to</param>
        /// <param name="queryText">raw query text</param>
        /// <param name="quoted">true if the text came from a quoted (phrase) region</param>
        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
        protected internal virtual Query GetFieldQuery(string field, string queryText, bool quoted)
        {
            return NewFieldQuery(Analyzer, field, queryText, quoted);
        }
+
        /// <summary>
        /// Builds the actual field query by delegating to <c>CreateFieldQuery</c>.
        /// Phrase handling is requested when <paramref name="quoted"/> is true or
        /// <see cref="AutoGeneratePhraseQueries"/> is enabled.
        /// </summary>
        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
        protected internal virtual Query NewFieldQuery(Analyzer analyzer, string field, string queryText, bool quoted)
        {
            // MUST for AND semantics, SHOULD for OR semantics.
            BooleanClause.Occur occur = DefaultOperator == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD;
            return CreateFieldQuery(analyzer, occur, field, queryText, quoted || AutoGeneratePhraseQueries, PhraseSlop);
        }
+
+        /// <summary>
+        /// Base implementation delegates to <see cref="GetFieldQuery(string,string,bool)"/>.
+        /// This method may be overridden, for example, to return
+        /// a SpanNearQuery instead of a PhraseQuery.
+        /// </summary>
+        /// <param name="field"></param>
+        /// <param name="queryText"></param>
+        /// <param name="slop"></param>
+        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
+        /// <returns></returns>
+        protected internal virtual Query GetFieldQuery(String field, String queryText, int slop)
+        {
+            Query query = GetFieldQuery(field, queryText, true);
+
+            if (query is PhraseQuery)
+            {
+                ((PhraseQuery)query).Slop = slop;
+            }
+            if (query is MultiPhraseQuery)
+            {
+                ((MultiPhraseQuery)query).Slop = slop;
+            }
+
+            return query;
+        }
+
        /// <summary>
        /// Builds a range query for <paramref name="field"/>. If both endpoints
        /// parse as dates in the configured <see cref="Locale"/>, they are
        /// normalized via <see cref="DateTools"/> using the field's date
        /// resolution; otherwise the raw (optionally lower-cased) text is used.
        /// </summary>
        protected internal virtual Query GetRangeQuery(string field,
                              string part1,
                              string part2,
                              bool startInclusive,
                              bool endInclusive)
        {
            if (LowercaseExpandedTerms)
            {
                part1 = part1 == null ? null : part1.ToLower(Locale);
                part2 = part2 == null ? null : part2.ToLower(Locale);
            }

            try
            {
                DateTime d1, d2;
                if (_useJavaStyleDateRangeParsing)
                {
                    // TODO: This doesn't emulate java perfectly.
                    // Java allows parsing of the string up to the end of the pattern
                    // and then ignores everything else.  .NET will throw an exception, 
                    // so this will fail in those cases, though the code below is clear
                    // that users can only specify the date, not the time.
                    var shortFormat = Locale.DateTimeFormat.ShortDatePattern;
                    d1 = DateTime.ParseExact(part1, shortFormat, Locale);
                    d2 = DateTime.ParseExact(part2, shortFormat, Locale);
                }
                else
                {
                    d1 = DateTime.Parse(part1, Locale);
                    d2 = DateTime.Parse(part2, Locale);
                }

                if (endInclusive)
                {
                    // The user can only specify the date, not the time, so make sure
                    // the time is set to the latest possible time of that date to really
                    // include all documents:

                    // TODO: Try to work out if the Time Zone is pertinent here or
                    // whether it should just be removed from the API entirely.
                    // In Java:
                    // Calendar cal = Calendar.getInstance(timeZone, locale);

                    var cal = Locale.Calendar;
                    d2 = cal.AddHours(d2, 23);
                    d2 = cal.AddMinutes(d2, 59);
                    d2 = cal.AddSeconds(d2, 59);
                    d2 = cal.AddMilliseconds(d2, 999);
                }
                DateTools.Resolution resolution = GetDateResolution(field);

                part1 = DateTools.DateToString(d1, resolution);
                part2 = DateTools.DateToString(d2, resolution);

            }
            catch (Exception)
            {
                // Deliberate best-effort: if either endpoint is not a parseable
                // date, fall through and build a plain term range from the raw text.
            }

            return NewRangeQuery(field, part1, part2, startInclusive, endInclusive);
        }
+
        /// <summary>Builds a new BooleanClause instance</summary>
        /// <param name="q">sub query</param>
        /// <param name="occur">how this clause should occur when matching documents</param>
        /// <returns> new BooleanClause instance</returns>
        /// <remarks>Virtual so subclasses can substitute a custom clause construction.</remarks>
        protected internal virtual BooleanClause NewBooleanClause(Query q, BooleanClause.Occur occur)
        {
            return new BooleanClause(q, occur);
        }
+
        /// <summary>
        /// Builds a new PrefixQuery instance
        /// </summary>
        /// <param name="prefix">Prefix term</param>
        /// <returns>new PrefixQuery instance</returns>
        protected internal virtual Query NewPrefixQuery(Term prefix)
        {
            PrefixQuery query = new PrefixQuery(prefix);
            // Apply the parser's configured rewrite method (constant-score by default).
            query.SetRewriteMethod(MultiTermRewriteMethod);
            return query;
        }
+
        /// <summary>
        /// Builds a new RegexpQuery instance
        /// </summary>
        /// <param name="regexp">Regexp term</param>
        /// <returns>new RegexpQuery instance</returns>
        protected internal virtual Query NewRegexpQuery(Term regexp)
        {
            RegexpQuery query = new RegexpQuery(regexp);
            // Apply the parser's configured rewrite method (constant-score by default).
            query.SetRewriteMethod(MultiTermRewriteMethod);
            return query;
        }
+
        /// <summary>
        /// Builds a new FuzzyQuery instance
        /// </summary>
        /// <param name="term">Term</param>
        /// <param name="minimumSimilarity">minimum similarity</param>
        /// <param name="prefixLength">prefix length</param>
        /// <returns>new FuzzyQuery Instance</returns>
        protected internal virtual Query NewFuzzyQuery(Term term, float minimumSimilarity, int prefixLength)
        {
            // FuzzyQuery doesn't yet allow constant score rewrite
            string text = term.Text();
            // Map the legacy float similarity onto an edit distance for this term's length.
            int numEdits = FuzzyQuery.FloatToEdits(minimumSimilarity,
                Character.CodePointCount(text,0, text.Length));
            return new FuzzyQuery(term, numEdits, prefixLength);
        }
+
        // LUCENE TODO: Should this be protected instead?
        /// <summary>
        /// Convenience overload that analyzes <paramref name="part"/> with the
        /// parser's configured <see cref="Analyzer"/>.
        /// </summary>
        private BytesRef AnalyzeMultitermTerm(string field, string part)
        {
            return AnalyzeMultitermTerm(field, part, Analyzer);
        }
+
        /// <summary>
        /// Runs <paramref name="part"/> through <paramref name="analyzerIn"/>
        /// (falling back to the parser's <see cref="Analyzer"/> when null) and
        /// returns the single term it produces as a deep-copied <see cref="BytesRef"/>.
        /// </summary>
        /// <exception cref="ArgumentException">if the analyzer yields zero or more than one term</exception>
        protected internal virtual BytesRef AnalyzeMultitermTerm(string field, string part, Analyzer analyzerIn)
        {
            if (analyzerIn == null) analyzerIn = Analyzer;

            TokenStream source = null;
            try
            {
                source = analyzerIn.TokenStream(field, part);
                source.Reset();

                ITermToBytesRefAttribute termAtt = source.GetAttribute<ITermToBytesRefAttribute>();
                BytesRef bytes = termAtt.BytesRef;

                // Exactly one token must come out of the analyzer.
                if (!source.IncrementToken())
                    throw new ArgumentException("analyzer returned no terms for multiTerm term: " + part);
                termAtt.FillBytesRef();
                if (source.IncrementToken())
                    throw new ArgumentException("analyzer returned too many terms for multiTerm term: " + part);
                source.End();
                // Defensive deep copy of the attribute's bytes before the stream is closed.
                return BytesRef.DeepCopyOf(bytes);
            }
            catch (IOException e)
            {
                throw new Exception("Error analyzing multiTerm term: " + part, e);
            }
            finally
            {
                // Always release the token stream, even on failure.
                IOUtils.CloseWhileHandlingException(source);
            }
        }
+
+        /// <summary>
+        /// Builds a new {@link TermRangeQuery} instance
+        /// </summary>
+        /// <param name="field">Field</param>
+        /// <param name="part1">min</param>
+        /// <param name="part2">max</param>
+        /// <param name="startInclusive">true if the start of the range is inclusive</param>
+        /// <param name="endInclusive">true if the end of the range is inclusive</param>
+        /// <returns>new <see cref="T:TermRangeQuery"/> instance</returns>
+        protected internal virtual Query NewRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
+        {
+            BytesRef start;
+            BytesRef end;
+
+            if (part1 == null)
+            {
+                start = null;
+            }
+            else
+            {
+                start = AnalyzeRangeTerms ? AnalyzeMultitermTerm(field, part1) : new BytesRef(part1);
+            }
+
+            if (part2 == null)
+            {
+                end = null;
+            }
+            else
+            {
+                end = AnalyzeRangeTerms ? AnalyzeMultitermTerm(field, part2) : new BytesRef(part2);
+            }
+
+            TermRangeQuery query = new TermRangeQuery(field, start, end, startInclusive, endInclusive);
+
+            query.SetRewriteMethod(MultiTermRewriteMethod);
+            return query;
+        }
+
        /// <summary>
        /// Builds a new MatchAllDocsQuery instance
        /// </summary>
        /// <returns>new MatchAllDocsQuery instance</returns>
        /// <remarks>Virtual so subclasses can substitute a different implementation.</remarks>
        protected internal virtual Query NewMatchAllDocsQuery()
        {
            return new MatchAllDocsQuery();
        }
+
        /// <summary>
        /// Builds a new WildcardQuery instance
        /// </summary>
        /// <param name="t">wildcard term</param>
        /// <returns>new WildcardQuery instance</returns>
        protected internal virtual Query NewWildcardQuery(Term t)
        {
            WildcardQuery query = new WildcardQuery(t);
            // Apply the parser's configured rewrite method (constant-score by default).
            query.SetRewriteMethod(MultiTermRewriteMethod);
            return query;
        }
+
        /// <summary>
        /// Factory method for generating query, given a set of clauses.
        /// By default creates a boolean query composed of clauses passed in,
        /// with coord scoring left enabled.
        ///
        /// Can be overridden by extending classes, to modify query being
        /// returned.
        /// </summary>
        /// <param name="clauses">List that contains <see cref="T:BooleanClause"/> instances 
        /// to join.</param>
        /// <exception cref="T:ParseException">throw in overridden method to disallow</exception>
        /// <returns>Resulting <see cref="T:Query"/> object.</returns>
        protected internal virtual Query GetBooleanQuery(IList<BooleanClause> clauses)
        {
            return GetBooleanQuery(clauses, false);
        }
+
+        /// <summary>
+        /// Factory method for generating query, given a set of clauses.
+        /// By default creates a boolean query composed of clauses passed in.
+        /// 
+        /// Can be overridden by extending classes, to modify query being
+        /// returned.
+        /// </summary>
+        /// <param name="clauses">List that contains <see cref="T:BooleanClause"/> instances
+        /// to join.</param>
+        /// <param name="disableCoord">true if coord scoring should be disabled.</param>
+        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
+        /// <returns>Resulting <see cref="T:Query"/> object.</returns>
+        protected internal virtual Query GetBooleanQuery(IList<BooleanClause> clauses, bool disableCoord)
+        {
+            if (clauses.Count == 0)
+            {
+                return null; // all clause words were filtered away by the analyzer.
+            }
+            BooleanQuery query = NewBooleanQuery(disableCoord);
+            foreach (BooleanClause clause in clauses)
+            {
+                query.Add(clause);
+            }
+            return query;
+        }
+
+        /// <summary>
+        /// Factory method for generating a query. Called when parser
+        /// parses an input term token that contains one or more wildcard
+        /// characters (? and *), but is not a prefix term token (one
+        /// that has just a single * character at the end)
+        /// <p>
+        /// Depending on settings, prefix term may be lower-cased
+        /// automatically. It will not go through the default Analyzer,
+        /// however, since normal Analyzers are unlikely to work properly
+        /// with wildcard templates.
+        /// <p>
+        /// Can be overridden by extending classes, to provide custom handling for
+        /// wildcard queries, which may be necessary due to missing analyzer calls.
+        /// </summary>
+        /// <param name="field">Name of the field query will use.</param>
+        /// <param name="termStr">Term token that contains one or more wild card
+        /// characters (? or *), but is not simple prefix term</param>
+        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
+        /// <returns>Resulting <see cref="T:Query"/> built for the term</returns>
+        protected internal virtual Query GetWildcardQuery(string field, string termStr)
+        {
+            if ("*".Equals(field))
+            {
+                if ("*".Equals(termStr)) return NewMatchAllDocsQuery();
+            }
+            if (!AllowLeadingWildcard && (termStr.StartsWith("*") || termStr.StartsWith("?")))
+                throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery");
+            if (LowercaseExpandedTerms)
+            {
+                termStr = termStr.ToLower(Locale);
+            }
+            Term t = new Term(field, termStr);
+            return NewWildcardQuery(t);
+        }
+
+        /// <summary>
+        /// Factory method for generating a query. Called when parser
+        /// parses an input term token that contains a regular expression
+        /// query.
+        /// <p>
+        /// Depending on settings, pattern term may be lower-cased
+        /// automatically. It will not go through the default Analyzer,
+        /// however, since normal Analyzers are unlikely to work properly
+        /// with regular expression templates.
+        /// <p>
+        /// Can be overridden by extending classes, to provide custom handling for
+        /// regular expression queries, which may be necessary due to missing analyzer
+        /// calls.
+        /// </summary>
+        /// <param name="field">Name of the field query will use.</param>
+        /// <param name="termStr">Term token that contains a regular expression</param>
+        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
+        /// <returns>Resulting <see cref="T:Query"/> built for the term</returns>
+        protected internal virtual Query GetRegexpQuery(string field, string termStr)
+        {
+            if (LowercaseExpandedTerms)
+            {
+                termStr = termStr.ToLower(Locale);
+            }
+            Term t = new Term(field, termStr);
+            return NewRegexpQuery(t);
+        }
+
+        /// <summary>
+        /// Factory method for generating a query (similar to
+        /// <see cref="M:GetWildcardQuery"/>). Called when the parser consumes an input
+        /// term token that uses prefix notation; that is, contains a single '*' wildcard
+        /// character as its last character. Since this is a special case
+        /// of a generic wildcard term, and such a query can be optimized easily,
+        /// this usually results in a different query object.
+        /// <para/>
+        /// Depending on settings, a prefix term may be lower-cased
+        /// automatically. It will not go through the default Analyzer,
+        /// however, since normal Analyzers are unlikely to work properly
+        /// with wildcard templates.
+        /// <para/>
+        /// Can be overridden by extending classes, to provide custom handling for
+        /// wild card queries, which may be necessary due to missing analyzer calls.
+        /// </summary>
+        /// <param name="field">Name of the field the query will use.</param>
+        /// <param name="termStr">Term token to use for building term for the query</param>
+        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
+        /// <returns>Resulting <see cref="T:Query"/> built for the term</returns>
+        protected internal virtual Query GetPrefixQuery(string field, string termStr)
+        {
+            // A leading wildcard is only legal when explicitly enabled.
+            if (termStr.StartsWith("*") && !AllowLeadingWildcard)
+            {
+                throw new ParseException("'*' not allowed as first character in PrefixQuery");
+            }
+            string prefix = LowercaseExpandedTerms ? termStr.ToLower(Locale) : termStr;
+            return NewPrefixQuery(new Term(field, prefix));
+        }
+
+        /// <summary>
+        /// Factory method for generating a query (similar to
+        /// <see cref="M:GetWildcardQuery"/>). Called when the parser consumes
+        /// an input term token that has the fuzzy suffix (~) appended.
+        /// </summary>
+        /// <param name="field">Name of the field the query will use.</param>
+        /// <param name="termStr">Term token to use for building term for the query</param>
+        /// <param name="minSimilarity">minimum similarity</param>
+        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
+        /// <returns>Resulting <see cref="T:Query"/> built for the term</returns>
+        protected internal virtual Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+        {
+            // Honor the lower-casing setting for expanded terms.
+            string text = LowercaseExpandedTerms ? termStr.ToLower(Locale) : termStr;
+            return NewFuzzyQuery(new Term(field, text), minSimilarity, FuzzyPrefixLength);
+        }
+
+        // extracted from the .jj grammar
+        /// <summary>
+        /// Builds the query for a bare (unquoted) term token, dispatching on the
+        /// modifier flags the grammar detected (wildcard, prefix, regexp, fuzzy).
+        /// </summary>
+        /// <param name="qfield">field the term applies to</param>
+        /// <param name="term">raw term token from the tokenizer</param>
+        /// <param name="fuzzySlop">optional ~ suffix token (used only for the fuzzy case)</param>
+        /// <param name="prefix">true if the token ends with a single trailing '*'</param>
+        /// <param name="wildcard">true if the token contains wildcard characters</param>
+        /// <param name="fuzzy">true if the token carries a ~ fuzzy suffix</param>
+        /// <param name="regexp">true if the token is a /regexp/ literal</param>
+        /// <returns>the resulting <see cref="T:Query"/></returns>
+        protected internal virtual Query HandleBareTokenQuery(string qfield, Token term, Token fuzzySlop, bool prefix, bool wildcard, bool fuzzy, bool regexp)
+        {
+            Query q;
+
+            // Unescaped form of the token; used by the fuzzy and plain-term branches below.
+            string termImage = DiscardEscapeChar(term.image);
+            if (wildcard)
+            {
+                // NOTE: unlike the other branches, the wildcard branch passes the raw
+                // image (escapes intact) - presumably so escaped wildcard characters
+                // can be distinguished inside GetWildcardQuery; mirrors the upstream grammar.
+                q = GetWildcardQuery(qfield, term.image);
+            }
+            else if (prefix)
+            {
+                // Strip the trailing '*' before unescaping.
+                q = GetPrefixQuery(qfield, DiscardEscapeChar(term.image.Substring(0, term.image.Length - 1)));
+            }
+            else if (regexp)
+            {
+                // Strip the surrounding '/' delimiters; the pattern is not unescaped.
+                q = GetRegexpQuery(qfield, term.image.Substring(1, term.image.Length - 2));
+            }
+            else if (fuzzy)
+            {
+                q = HandleBareFuzzy(qfield, fuzzySlop, termImage);
+            }
+            else
+            {
+                q = GetFieldQuery(qfield, termImage, false);
+            }
+            return q;
+        }
+
+        /// <summary>
+        /// Builds a fuzzy query for a bare term carrying a <c>~</c> suffix.
+        /// The text after the <c>~</c> is parsed as the minimum similarity /
+        /// edit distance; if it cannot be parsed, FuzzyMinSim is used.
+        /// </summary>
+        /// <param name="qfield">field the term applies to</param>
+        /// <param name="fuzzySlop">token whose image is "~" followed by an optional number</param>
+        /// <param name="termImage">the unescaped term text</param>
+        /// <exception cref="ParseException">if the similarity is negative or a fractional edit distance</exception>
+        protected internal virtual Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
+        {
+            Query q;
+            float fms = FuzzyMinSim;
+            try
+            {
+                // Skip the leading '~' and parse the rest as the similarity value.
+                // NOTE(review): this parses with Locale, whereas Java's Float.parseFloat
+                // is culture-invariant - confirm "0.5" still parses under comma-decimal cultures.
+                fms = float.Parse(fuzzySlop.image.Substring(1), Locale);
+            }
+            catch (Exception ignored) { } // deliberate best-effort: keep the FuzzyMinSim default
+            if (fms < 0.0f)
+            {
+                throw new ParseException("Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !");
+            }
+            else if (fms >= 1.0f && fms != (int)fms)
+            {
+                // Values >= 1 are interpreted as integer edit distances.
+                throw new ParseException("Fractional edit distances are not allowed!");
+            }
+            q = GetFuzzyQuery(qfield, termImage, fms);
+            return q;
+        }
+
+        // extracted from the .jj grammar
+        /// <summary>
+        /// Builds the query for a quoted phrase token. A trailing <c>~n</c>
+        /// (passed in as <paramref name="fuzzySlop"/>) overrides the default
+        /// PhraseSlop; an unparsable value keeps the default.
+        /// </summary>
+        protected internal virtual Query HandleQuotedTerm(string qfield, Token term, Token fuzzySlop)
+        {
+            int s = PhraseSlop;  // default
+            if (fuzzySlop != null)
+            {
+                try
+                {
+                    // fuzzySlop.image is "~" followed by the slop number; any
+                    // fractional part is truncated by the int cast.
+                    s = (int)float.Parse(fuzzySlop.image.Substring(1), Locale);
+                }
+                catch (Exception ignored) { } // deliberate best-effort: keep PhraseSlop
+            }
+            // Substring(1, Length - 2) strips the surrounding quote characters.
+            return GetFieldQuery(qfield, DiscardEscapeChar(term.image.Substring(1, term.image.Length - 2)), s);
+        }
+
+        // extracted from the .jj grammar
+        /// <summary>
+        /// Applies the boost given by a <c>^</c> token to <paramref name="q"/>.
+        /// An unparsable boost value silently falls back to 1.0 (no boost), and
+        /// null queries (e.g. produced by stop words) are returned untouched.
+        /// </summary>
+        protected internal virtual Query HandleBoost(Query q, Token boost)
+        {
+            if (boost == null)
+            {
+                return q;
+            }
+
+            float boostValue = 1.0f;
+            try
+            {
+                boostValue = float.Parse(boost.image, Locale);
+            }
+            catch (Exception)
+            {
+                /* Should this be handled somehow? (defaults to "no boost", if
+                 * boost number is invalid)
+                 */
+            }
+
+            // avoid boosting null queries, such as those caused by stop words
+            if (q != null)
+            {
+                q.Boost = boostValue;
+            }
+            return q;
+        }
+
+        /// <summary>
+        /// Returns a string where the escape char has been
+        /// removed, or kept only once if there was a double escape.
+        /// 
+        /// Supports escaped unicode characters, e.g. translates 
+        /// <code>\\u0041</code> to <code>A</code>.
+        /// </summary>
+        /// <param name="input">raw term text, possibly containing backslash escapes</param>
+        /// <returns>the unescaped text</returns>
+        /// <exception cref="ParseException">
+        /// if the input ends inside a <c>\u</c> sequence, ends with a dangling
+        /// escape char, or a <c>\u</c> sequence contains a non-hex digit
+        /// </exception>
+        protected internal virtual string DiscardEscapeChar(string input)
+        {
+            // Create char array to hold unescaped char sequence
+            char[] output = new char[input.Length];
+
+            // The length of the output can be less than the input
+            // due to discarded escape chars. This variable holds
+            // the actual length of the output
+            int length = 0;
+
+            // We remember whether the last processed character was
+            // an escape character
+            bool lastCharWasEscapeChar = false;
+
+            // The multiplier the current unicode digit must be multiplied with.
+            // E.g. the first digit must be multiplied with 16^3, the second with 16^2...
+            int codePointMultiplier = 0;
+
+            // Used to calculate the codepoint of the escaped unicode character
+            int codePoint = 0;
+
+            for (int i = 0; i < input.Length; i++)
+            {
+                char curChar = input[i];
+                if (codePointMultiplier > 0)
+                {
+                    // Inside a \uXXXX sequence: accumulate this hex digit.
+                    codePoint += HexToInt(curChar) * codePointMultiplier;
+                    // codePointMultiplier is always non-negative here (4096 shifted
+                    // down), so a plain right shift is equivalent to Java's >>> and
+                    // the Number.URShift helper is unnecessary.
+                    codePointMultiplier >>= 4;
+                    if (codePointMultiplier == 0)
+                    {
+                        // All four hex digits consumed; emit the decoded character.
+                        output[length++] = (char)codePoint;
+                        codePoint = 0;
+                    }
+                }
+                else if (lastCharWasEscapeChar)
+                {
+                    if (curChar == 'u')
+                    {
+                        // found an escaped unicode character
+                        codePointMultiplier = 16 * 16 * 16;
+                    }
+                    else
+                    {
+                        // this character was escaped
+                        output[length] = curChar;
+                        length++;
+                    }
+                    lastCharWasEscapeChar = false;
+                }
+                else
+                {
+                    if (curChar == '\\')
+                    {
+                        lastCharWasEscapeChar = true;
+                    }
+                    else
+                    {
+                        output[length] = curChar;
+                        length++;
+                    }
+                }
+            }
+
+            if (codePointMultiplier > 0)
+            {
+                throw new ParseException("Truncated unicode escape sequence.");
+            }
+
+            if (lastCharWasEscapeChar)
+            {
+                throw new ParseException("Term can not end with escape character.");
+            }
+
+            return new string(output, 0, length);
+        }
+
+        /// <summary>
+        /// Returns the numeric value of the hexadecimal character
+        /// </summary>
+        /// <param name="c">a character expected to be in [0-9a-fA-F]</param>
+        /// <exception cref="ParseException">if <paramref name="c"/> is not a hex digit</exception>
+        private static int HexToInt(char c)
+        {
+            // Guard-clause form: test each hex digit range in turn.
+            if (c >= '0' && c <= '9')
+            {
+                return c - '0';
+            }
+            if (c >= 'a' && c <= 'f')
+            {
+                return c - 'a' + 10;
+            }
+            if (c >= 'A' && c <= 'F')
+            {
+                return c - 'A' + 10;
+            }
+            throw new ParseException("Non-hex character in Unicode escape sequence: " + c);
+        }
+
+        /// <summary>
+        /// Returns a String where those characters that QueryParser
+        /// expects to be escaped are escaped by a preceding <code>\</code>.
+        /// </summary>
+        public static string Escape(string s)
+        {
+            StringBuilder sb = new StringBuilder();
+            foreach (char c in s)
+            {
+                // These characters are part of the query syntax and must be escaped
+                switch (c)
+                {
+                    case '\\': case '+': case '-': case '!': case '(': case ')': case ':':
+                    case '^': case '[': case ']': case '\"': case '{': case '}': case '~':
+                    case '*': case '?': case '|': case '&': case '/':
+                        sb.Append('\\');
+                        break;
+                }
+                sb.Append(c);
+            }
+            return sb.ToString();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
new file mode 100644
index 0000000..dcfa193
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
@@ -0,0 +1,224 @@
+using System;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Token ids produced by the generated QueryParser tokenizer (one constant
+    /// per JavaCC regular expression). Note the gap at id 7, which has no named
+    /// constant; these ids index into <c>QueryParserConstants.TokenImage</c>.
+    /// </summary>
+    public static class RegexpToken
+    {
+        /// <summary>End of File. </summary>
+        public const int EOF = 0;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _NUM_CHAR = 1;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _ESCAPED_CHAR = 2;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _TERM_START_CHAR = 3;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _TERM_CHAR = 4;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _WHITESPACE = 5;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _QUOTED_CHAR = 6;
+        /// <summary>RegularExpression Id. </summary>
+        public const int AND = 8;
+        /// <summary>RegularExpression Id. </summary>
+        public const int OR = 9;
+        /// <summary>RegularExpression Id. </summary>
+        public const int NOT = 10;
+        /// <summary>RegularExpression Id. </summary>
+        public const int PLUS = 11;
+        /// <summary>RegularExpression Id. </summary>
+        public const int MINUS = 12;
+        /// <summary>RegularExpression Id. </summary>
+        public const int BAREOPER = 13;
+        /// <summary>RegularExpression Id. </summary>
+        public const int LPAREN = 14;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RPAREN = 15;
+        /// <summary>RegularExpression Id. </summary>
+        public const int COLON = 16;
+        /// <summary>RegularExpression Id. </summary>
+        public const int STAR = 17;
+        /// <summary>RegularExpression Id. </summary>
+        public const int CARAT = 18;
+        /// <summary>RegularExpression Id. </summary>
+        public const int QUOTED = 19;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TERM = 20;
+        /// <summary>RegularExpression Id. </summary>
+        public const int FUZZY_SLOP = 21;
+        /// <summary>RegularExpression Id. </summary>
+        public const int PREFIXTERM = 22;
+        /// <summary>RegularExpression Id. </summary>
+        public const int WILDTERM = 23;
+        /// <summary>RegularExpression Id. </summary>
+        public const int REGEXPTERM = 24;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RANGEIN_START = 25;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RANGEEX_START = 26;
+        /// <summary>RegularExpression Id. </summary>
+        public const int NUMBER = 27;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RANGE_TO = 28;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RANGEIN_END = 29;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RANGEEX_END = 30;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RANGE_QUOTED = 31;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RANGE_GOOP = 32;
+    }
+
+    /// <summary>
+    /// Lexical states of the generated token manager.
+    /// </summary>
+    public static class LexicalToken
+    {
+        /// <summary>Lexical state.</summary>
+        public const int Boost = 0;
+        /// <summary>Lexical state.</summary>
+        public const int Range = 1;
+        /// <summary>Lexical state.</summary>
+        public const int DEFAULT = 2;
+    }
+
+    // NOTE: In Java, this was an interface. However, in 
+    // .NET we cannot define constants in an interface.
+    // So, instead we are making it a static class so it 
+    // can be shared between classes with different base classes.
+
+    // public interface QueryParserConstants
+
+    /// <summary>
+    /// Token literal values and constants.
+    /// Generated by org.javacc.parser.OtherFilesGen#start()
+    /// </summary>
+    public static class QueryParserConstants
+    {
+        /// <summary>
+        /// Literal token values, indexed by the token ids declared in
+        /// <see cref="RegexpToken"/> (used when reporting parse errors).
+        /// FIXED: the previous array contained stale Lucene 3.x entries
+        /// (&lt;RANGEIN_QUOTED&gt;, &lt;RANGEIN_GOOP&gt; and a duplicate "TO")
+        /// which shifted the images for every token id &gt;= 30; the array now
+        /// has exactly one entry per id (0-32), matching the Java 4.8 grammar.
+        /// </summary>
+        public static string[] TokenImage = new string[] {
+            "<EOF>",              // EOF = 0
+            "<_NUM_CHAR>",        // _NUM_CHAR = 1
+            "<_ESCAPED_CHAR>",    // _ESCAPED_CHAR = 2
+            "<_TERM_START_CHAR>", // _TERM_START_CHAR = 3
+            "<_TERM_CHAR>",       // _TERM_CHAR = 4
+            "<_WHITESPACE>",      // _WHITESPACE = 5
+            "<_QUOTED_CHAR>",     // _QUOTED_CHAR = 6
+            "<token of kind 7>",  // (skipped token; no named constant)
+            "<AND>",              // AND = 8
+            "<OR>",               // OR = 9
+            "<NOT>",              // NOT = 10
+            "\"+\"",              // PLUS = 11
+            "\"-\"",              // MINUS = 12
+            "<BAREOPER>",         // BAREOPER = 13
+            "\"(\"",              // LPAREN = 14
+            "\")\"",              // RPAREN = 15
+            "\":\"",              // COLON = 16
+            "\"*\"",              // STAR = 17
+            "\"^\"",              // CARAT = 18
+            "<QUOTED>",           // QUOTED = 19
+            "<TERM>",             // TERM = 20
+            "<FUZZY_SLOP>",       // FUZZY_SLOP = 21
+            "<PREFIXTERM>",       // PREFIXTERM = 22
+            "<WILDTERM>",         // WILDTERM = 23
+            "<REGEXPTERM>",       // REGEXPTERM = 24
+            "\"[\"",              // RANGEIN_START = 25
+            "\"{\"",              // RANGEEX_START = 26
+            "<NUMBER>",           // NUMBER = 27
+            "\"TO\"",             // RANGE_TO = 28
+            "\"]\"",              // RANGEIN_END = 29
+            "\"}\"",              // RANGEEX_END = 30
+            "<RANGE_QUOTED>",     // RANGE_QUOTED = 31
+            "<RANGE_GOOP>"        // RANGE_GOOP = 32
+        };
+    }
+}
\ No newline at end of file


[04/50] [abbrv] lucenenet git commit: Changed QueryParserTokenManager line back to the way it was in Lucene.Net 3.0.3; although it differs from the Java version, this appears to be correct in .NET.

Posted by sy...@apache.org.
Changed QueryParserTokenManager line back to the way it was in Lucene.Net 3.0.3; although it differs from the Java version, this appears to be correct in .NET.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/c9b96c8d
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/c9b96c8d
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/c9b96c8d

Branch: refs/heads/master
Commit: c9b96c8d231e482d3e322f2546978c831c7eb08a
Parents: d5c4372
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 18:43:14 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:29:47 2016 +0700

----------------------------------------------------------------------
 Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/c9b96c8d/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs b/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
index 3a483bb..e92bcb8 100644
--- a/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
+++ b/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
@@ -335,9 +335,11 @@ namespace Lucene.Net.QueryParser.Classic
 				}
 				else if (curChar < 128)
 				{
-                    // TODO: This didn't change in Java from 3.0.1 to 4.8.0, but it is different in .NET
-                    //ulong l = (ulong) (1L << (curChar & 63)); 
-                    ulong l = (ulong)(1L << (curChar & 077));
+                    // NOTE: This didn't change in Java from 3.0.1 to 4.8.0, but it is different in .NET.
+                    // But changing it back made more tests pass, so I am working under the assumption 63
+                    // is the correct value.
+                    //ulong l = (ulong)(1L << (curChar & 077));
+                    ulong l = (ulong) (1L << (curChar & 63)); 
 					do 
 					{
 						switch (jjstateSet[--i])


[30/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
deleted file mode 100644
index f233c02..0000000
--- a/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
+++ /dev/null
@@ -1,376 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Documents;
-using Lucene.Net.Index;
-using Lucene.Net.Search;
-using Lucene.Net.Util;
-using NUnit.Framework;
-using System;
-using System.Collections.Generic;
-using System.IO;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class TestMultiFieldQueryParser : LuceneTestCase
-    {
-        /// <summary>
-        /// test stop words parsing for both the non static form, and for the 
-        /// corresponding static form (qtxt, fields[]).
-        /// </summary>
-        [Test]
-        public void TestStopwordsParsing()
-        {
-            AssertStopQueryEquals("one", "b:one t:one");
-            AssertStopQueryEquals("one stop", "b:one t:one");
-            AssertStopQueryEquals("one (stop)", "b:one t:one");
-            AssertStopQueryEquals("one ((stop))", "b:one t:one");
-            AssertStopQueryEquals("stop", "");
-            AssertStopQueryEquals("(stop)", "");
-            AssertStopQueryEquals("((stop))", "");
-        }
-
-        /// <summary>
-        /// verify parsing of query using a stopping analyzer  
-        /// </summary>
-        /// <param name="qtxt"></param>
-        /// <param name="expectedRes"></param>
-        private void AssertStopQueryEquals(string qtxt, string expectedRes)
-        {
-            string[] fields = { "b", "t" };
-            BooleanClause.Occur[] occur = new BooleanClause.Occur[] { BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD };
-            TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer();
-            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, a);
-
-            Query q = mfqp.Parse(qtxt);
-            assertEquals(expectedRes, q.toString());
-
-            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, qtxt, fields, occur, a);
-            assertEquals(expectedRes, q.toString());
-        }
-
-        [Test]
-        public void TestSimple()
-        {
-            string[] fields = { "b", "t" };
-            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()));
-
-            Query q = mfqp.Parse("one");
-            assertEquals("b:one t:one", q.toString());
-
-            q = mfqp.Parse("one two");
-            assertEquals("(b:one t:one) (b:two t:two)", q.toString());
-
-            q = mfqp.Parse("+one +two");
-            assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
-
-            q = mfqp.Parse("+one -two -three");
-            assertEquals("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.toString());
-
-            q = mfqp.Parse("one^2 two");
-            assertEquals("((b:one t:one)^2.0) (b:two t:two)", q.toString());
-
-            q = mfqp.Parse("one~ two");
-            assertEquals("(b:one~2 t:one~2) (b:two t:two)", q.toString());
-
-            q = mfqp.Parse("one~0.8 two^2");
-            assertEquals("(b:one~0 t:one~0) ((b:two t:two)^2.0)", q.toString());
-
-            q = mfqp.Parse("one* two*");
-            assertEquals("(b:one* t:one*) (b:two* t:two*)", q.toString());
-
-            q = mfqp.Parse("[a TO c] two");
-            assertEquals("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.toString());
-
-            q = mfqp.Parse("w?ldcard");
-            assertEquals("b:w?ldcard t:w?ldcard", q.toString());
-
-            q = mfqp.Parse("\"foo bar\"");
-            assertEquals("b:\"foo bar\" t:\"foo bar\"", q.toString());
-
-            q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
-            assertEquals("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.toString());
-
-            q = mfqp.Parse("\"foo bar\"~4");
-            assertEquals("b:\"foo bar\"~4 t:\"foo bar\"~4", q.toString());
-
-            // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
-            q = mfqp.Parse("b:\"foo bar\"~4");
-            assertEquals("b:\"foo bar\"~4", q.toString());
-
-            // make sure that terms which have a field are not touched:
-            q = mfqp.Parse("one f:two");
-            assertEquals("(b:one t:one) f:two", q.toString());
-
-            // AND mode:
-            mfqp.DefaultOperator = QueryParserBase.AND_OPERATOR;
-            q = mfqp.Parse("one two");
-            assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
-            q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
-            assertEquals("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.toString());
-        }
-
-        [Test]
-        public void TestBoostsSimple()
-        {
-            IDictionary<string, float> boosts = new Dictionary<string, float>();
-            boosts["b"] = (float)5;
-            boosts["t"] = (float)10;
-            string[] fields = { "b", "t" };
-            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()), boosts);
-
-
-            //Check for simple
-            Query q = mfqp.Parse("one");
-            assertEquals("b:one^5.0 t:one^10.0", q.toString());
-
-            //Check for AND
-            q = mfqp.Parse("one AND two");
-            assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q.toString());
-
-            //Check for OR
-            q = mfqp.Parse("one OR two");
-            assertEquals("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.toString());
-
-            //Check for AND and a field
-            q = mfqp.Parse("one AND two AND foo:test");
-            assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q.toString());
-
-            q = mfqp.Parse("one^3 AND two^4");
-            assertEquals("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", q.toString());
-        }
-
-        [Test]
-        public void TestStaticMethod1()
-        {
-            string[] fields = { "b", "t" };
-            string[] queries = { "one", "two" };
-            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, new MockAnalyzer(Random()));
-            assertEquals("b:one t:two", q.toString());
-
-            string[] queries2 = { "+one", "+two" };
-            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries2, fields, new MockAnalyzer(Random()));
-            assertEquals("(+b:one) (+t:two)", q.toString());
-
-            string[] queries3 = { "one", "+two" };
-            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries3, fields, new MockAnalyzer(Random()));
-            assertEquals("b:one (+t:two)", q.toString());
-
-            string[] queries4 = { "one +more", "+two" };
-            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries4, fields, new MockAnalyzer(Random()));
-            assertEquals("(b:one +b:more) (+t:two)", q.toString());
-
-            string[] queries5 = { "blah" };
-            try
-            {
-                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries5, fields, new MockAnalyzer(Random()));
-                fail();
-            }
-            catch (ArgumentException e)
-            {
-                // expected exception, array length differs
-            }
-
-            // check also with stop words for this static form (qtxts[], fields[]).
-            TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer();
-
-            string[] queries6 = { "((+stop))", "+((stop))" };
-            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries6, fields, stopA);
-            assertEquals("", q.toString());
-
-            string[] queries7 = { "one ((+stop)) +more", "+((stop)) +two" };
-            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries7, fields, stopA);
-            assertEquals("(b:one +b:more) (+t:two)", q.toString());
-        }
-
-        [Test]
-        public void TestStaticMethod2()
-        {
-            string[] fields = { "b", "t" };
-            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
-            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer(Random()));
-            assertEquals("+b:one -t:one", q.toString());
-
-            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer(Random()));
-            assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
-
-            try
-            {
-                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer(Random()));
-                fail();
-            }
-            catch (ArgumentException e)
-            {
-                // expected exception, array length differs
-            }
-        }
-
-        [Test]
-        public void TestStaticMethod2Old()
-        {
-            string[] fields = { "b", "t" };
-            //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
-            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
-
-            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer(Random()));//, fields, flags, new MockAnalyzer(random));
-            assertEquals("+b:one -t:one", q.toString());
-
-            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer(Random()));
-            assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
-
-            try
-            {
-                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer(Random()));
-                fail();
-            }
-            catch (ArgumentException e)
-            {
-                // expected exception, array length differs
-            }
-        }
-
-        [Test]
-        public void TestStaticMethod3()
-        {
-            string[] queries = { "one", "two", "three" };
-            string[] fields = { "f1", "f2", "f3" };
-            BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST,
-                BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD};
-            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer(Random()));
-            assertEquals("+f1:one -f2:two f3:three", q.toString());
-
-            try
-            {
-                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer(Random()));
-                fail();
-            }
-            catch (ArgumentException e)
-            {
-                // expected exception, array length differs
-            }
-        }
-
-        [Test]
-        public void TestStaticMethod3Old()
-        {
-            string[] queries = { "one", "two" };
-            string[] fields = { "b", "t" };
-            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
-            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer(Random()));
-            assertEquals("+b:one -t:two", q.toString());
-
-            try
-            {
-                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer(Random()));
-                fail();
-            }
-            catch (ArgumentException e)
-            {
-                // expected exception, array length differs
-            }
-        }
-
-        [Test]
-        public void TestAnalyzerReturningNull()
-        {
-            string[] fields = new string[] { "f1", "f2", "f3" };
-            MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new AnalyzerReturningNull());
-            Query q = parser.Parse("bla AND blo");
-            assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString());
-            // the following queries are not affected as their terms are not analyzed anyway:
-            q = parser.Parse("bla*");
-            assertEquals("f1:bla* f2:bla* f3:bla*", q.toString());
-            q = parser.Parse("bla~");
-            assertEquals("f1:bla~2 f2:bla~2 f3:bla~2", q.toString());
-            q = parser.Parse("[a TO c]");
-            assertEquals("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.toString());
-        }
-
-        [Test]
-        public void TestStopWordSearching()
-        {
-            Analyzer analyzer = new MockAnalyzer(Random());
-            using (var ramDir = NewDirectory())
-            {
-                using (IndexWriter iw = new IndexWriter(ramDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)))
-                {
-                    Document doc = new Document();
-                    doc.Add(NewTextField("body", "blah the footest blah", Field.Store.NO));
-                    iw.AddDocument(doc);
-                }
-
-                MultiFieldQueryParser mfqp =
-                  new MultiFieldQueryParser(TEST_VERSION_CURRENT, new string[] { "body" }, analyzer);
-                mfqp.DefaultOperator = QueryParser.Operator.AND;
-                Query q = mfqp.Parse("the footest");
-                using (IndexReader ir = DirectoryReader.Open(ramDir))
-                {
-                    IndexSearcher @is = NewSearcher(ir);
-                    ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs;
-                    assertEquals(1, hits.Length);
-                }
-            }
-        }
-
-        private class AnalyzerReturningNull : Analyzer
-        {
-            MockAnalyzer stdAnalyzer = new MockAnalyzer(Random());
-
-            public AnalyzerReturningNull()
-                : base(PER_FIELD_REUSE_STRATEGY)
-            { }
-
-            public override System.IO.TextReader InitReader(string fieldName, TextReader reader)
-            {
-                if ("f1".equals(fieldName))
-                {
-                    // we don't use the reader, so close it:
-                    IOUtils.CloseWhileHandlingException(reader);
-                    // return empty reader, so MockTokenizer returns no tokens:
-                    return new StringReader("");
-                }
-                else
-                {
-                    return base.InitReader(fieldName, reader);
-                }
-            }
-
-            public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
-            {
-                return stdAnalyzer.CreateComponents(fieldName, reader);
-            }
-        }
-
-        [Test]
-        public void TestSimpleRegex()
-        {
-            string[] fields = new string[] { "a", "b" };
-            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()));
-
-            BooleanQuery bq = new BooleanQuery(true);
-            bq.Add(new RegexpQuery(new Term("a", "[a-z][123]")), BooleanClause.Occur.SHOULD);
-            bq.Add(new RegexpQuery(new Term("b", "[a-z][123]")), BooleanClause.Occur.SHOULD);
-            assertEquals(bq, mfqp.Parse("/[a-z][123]/"));
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs b/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
deleted file mode 100644
index 3aaa9b2..0000000
--- a/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
+++ /dev/null
@@ -1,121 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Tokenattributes;
-using Lucene.Net.Index;
-using Lucene.Net.Search;
-using Lucene.Net.Util;
-using NUnit.Framework;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class TestMultiPhraseQueryParsing_ : LuceneTestCase
-    {
-        private class TokenAndPos
-        {
-            public readonly string token;
-            public readonly int pos;
-            public TokenAndPos(string token, int pos)
-            {
-                this.token = token;
-                this.pos = pos;
-            }
-        }
-
-        private class CannedAnalyzer : Analyzer
-        {
-            private readonly TokenAndPos[] tokens;
-
-            public CannedAnalyzer(TokenAndPos[] tokens)
-            {
-                this.tokens = tokens;
-            }
-
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                return new TokenStreamComponents(new CannedTokenizer(reader, tokens));
-            }
-        }
-
-        private class CannedTokenizer : Tokenizer
-        {
-            private readonly TokenAndPos[] tokens;
-            private int upto = 0;
-            private int lastPos = 0;
-            private readonly ICharTermAttribute termAtt;
-            private readonly IPositionIncrementAttribute posIncrAtt;
-
-            public CannedTokenizer(System.IO.TextReader reader, TokenAndPos[] tokens)
-                : base(reader)
-            {
-                this.tokens = tokens;
-                this.termAtt = AddAttribute<ICharTermAttribute>();
-                this.posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
-            }
-
-            public override sealed bool IncrementToken()
-            {
-                ClearAttributes();
-                if (upto < tokens.Length)
-                {
-                    TokenAndPos token = tokens[upto++];
-                    termAtt.SetEmpty();
-                    termAtt.Append(token.token);
-                    posIncrAtt.PositionIncrement = (token.pos - lastPos);
-                    lastPos = token.pos;
-                    return true;
-                }
-                else
-                {
-                    return false;
-                }
-            }
-            public override void Reset()
-            {
-                base.Reset();
-                this.upto = 0;
-                this.lastPos = 0;
-            }
-        }
-
-        [Test]
-        public void TestMultiPhraseQueryParsing()
-        {
-            TokenAndPos[] INCR_0_QUERY_TOKENS_AND = new TokenAndPos[]
-            {
-                new TokenAndPos("a", 0),
-                new TokenAndPos("1", 0),
-                new TokenAndPos("b", 1),
-                new TokenAndPos("1", 1),
-                new TokenAndPos("c", 2)
-            };
-
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new CannedAnalyzer(INCR_0_QUERY_TOKENS_AND));
-            Query q = qp.Parse("\"this text is acually ignored\"");
-            assertTrue("wrong query type!", q is MultiPhraseQuery);
-
-            MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery();
-            multiPhraseQuery.Add(new Term[] { new Term("field", "a"), new Term("field", "1") }, -1);
-            multiPhraseQuery.Add(new Term[] { new Term("field", "b"), new Term("field", "1") }, 0);
-            multiPhraseQuery.Add(new Term[] { new Term("field", "c") }, 1);
-
-            assertEquals(multiPhraseQuery, q);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
deleted file mode 100644
index 369fe92..0000000
--- a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ /dev/null
@@ -1,564 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Tokenattributes;
-using Lucene.Net.Documents;
-using Lucene.Net.QueryParser.Flexible.Standard;
-using Lucene.Net.QueryParser.Util;
-using Lucene.Net.Search;
-using Lucene.Net.Support;
-using NUnit.Framework;
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class TestQueryParser : QueryParserTestBase
-    {
-        public class QPTestParser : QueryParser
-        {
-            public QPTestParser(string f, Analyzer a)
-                : base(TEST_VERSION_CURRENT, f, a)
-            {
-            }
-
-            protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
-            {
-                throw new ParseException("Fuzzy queries not allowed");
-            }
-
-            protected internal override Query GetWildcardQuery(string field, string termStr)
-            {
-                throw new ParseException("Wildcard queries not allowed");
-            }
-
-        }
-
-        // Moved to QueryParserTestBase
-        //public QueryParser GetParser(Analyzer a)
-        //{
-        //    if (a == null) a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
-        //    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, DefaultField, a);
-        //    qp.DefaultOperator = (QueryParserBase.OR_OPERATOR);
-        //    return qp;
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
-        //{
-        //    return GetParser(a);
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
-        //{
-        //    Debug.Assert(cqpC != null, "Parameter must not be null");
-        //    Debug.Assert(cqpC is QueryParser, "Parameter must be instance of QueryParser");
-        //    QueryParser qp = (QueryParser)cqpC;
-        //    return qp.Parse(query);
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override Query GetQuery(string query, Analyzer a)
-        //{
-        //    return GetParser(a).Parse(query);
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override bool IsQueryParserException(Exception exception)
-        //{
-        //    return exception is ParseException;
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
-        //{
-        //    Debug.Assert(cqpC is QueryParser);
-        //    QueryParser qp = (QueryParser)cqpC;
-        //    qp.DefaultOperator = QueryParserBase.Operator.OR;
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
-        //{
-        //    Debug.Assert(cqpC is QueryParser);
-        //    QueryParser qp = (QueryParser)cqpC;
-        //    qp.DefaultOperator = QueryParserBase.Operator.AND;
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
-        //{
-        //    Debug.Assert(cqpC is QueryParser);
-        //    QueryParser qp = (QueryParser)cqpC;
-        //    qp.AnalyzeRangeTerms = (value);
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
-        //{
-        //    Debug.Assert(cqpC is QueryParser);
-        //    QueryParser qp = (QueryParser)cqpC;
-        //    qp.AutoGeneratePhraseQueries = value;
-        //}
-
-        // Moved to QueryParserTestBase
-        //public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
-        //{
-        //    Debug.Assert(cqpC is QueryParser);
-        //    QueryParser qp = (QueryParser)cqpC;
-        //    qp.SetDateResolution(field.toString(), value);
-        //}
-
-        [Test]
-        public override void TestDefaultOperator()
-        {
-            QueryParser qp = GetParser(new MockAnalyzer(Random()));
-            // make sure OR is the default:
-            assertEquals(QueryParserBase.OR_OPERATOR, qp.DefaultOperator);
-            SetDefaultOperatorAND(qp);
-            assertEquals(QueryParserBase.AND_OPERATOR, qp.DefaultOperator);
-            SetDefaultOperatorOR(qp);
-            assertEquals(QueryParserBase.OR_OPERATOR, qp.DefaultOperator);
-        }
-
-        // LUCENE-2002: when we run javacc to regen QueryParser,
-        // we also run a replaceregexp step to fix 2 of the public
-        // ctors (change them to protected):
-        //
-        // protected QueryParser(CharStream stream)
-        //
-        // protected QueryParser(QueryParserTokenManager tm)
-        //
-        // This test is here as a safety, in case that ant step
-        // doesn't work for some reason.
-        [Test]
-        public void TestProtectedCtors()
-        {
-            try
-            {
-                typeof(QueryParser).GetConstructor(new Type[] { typeof(ICharStream) });
-                fail("please switch public QueryParser(CharStream) to be protected");
-            }
-            catch (Exception nsme)
-            {
-                // expected
-            }
-            try
-            {
-                typeof(QueryParser).GetConstructor(new Type[] { typeof(QueryParserTokenManager) });
-                fail("please switch public QueryParser(QueryParserTokenManager) to be protected");
-            }
-            catch (Exception nsme)
-            {
-                // expected
-            }
-        }
-
-        private class TestFuzzySlopeExtendabilityQueryParser : QueryParser
-        {
-            public TestFuzzySlopeExtendabilityQueryParser()
-                : base(TEST_VERSION_CURRENT, "a", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
-            {}
-
-            protected internal override Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
-            {
-                if (fuzzySlop.image.EndsWith("\u20ac"))
-                {
-                    float fms = FuzzyMinSim;
-                    try
-                    {
-                        fms = float.Parse(fuzzySlop.image.Substring(1, fuzzySlop.image.Length - 2));
-                    }
-                    catch (Exception ignored) { }
-                    float value = float.Parse(termImage);
-                    return GetRangeQuery(qfield, (value - fms / 2.0f).ToString(), (value + fms / 2.0f).ToString(), true, true);
-                }
-                return base.HandleBareFuzzy(qfield, fuzzySlop, termImage);
-            }
-        }
-
-        [Test]
-        public void TestFuzzySlopeExtendability()
-        {
-            QueryParser qp = new TestFuzzySlopeExtendabilityQueryParser();
-            assertEquals(qp.Parse("a:[11.95 TO 12.95]"), qp.Parse("12.45~1\u20ac"));
-        }
-
-        private class TestStarParsingQueryParser : QueryParser
-        {
-            public readonly int[] type = new int[1];
-
-            public TestStarParsingQueryParser()
-                : base(TEST_VERSION_CURRENT, "field", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
-            { }
-
-            protected internal override Query GetWildcardQuery(string field, string termStr)
-            {
-                // override error checking of superclass
-                type[0] = 1;
-                return new TermQuery(new Index.Term(field, termStr));
-            }
-
-            protected internal override Query GetPrefixQuery(string field, string termStr)
-            {
-                // override error checking of superclass
-                type[0] = 2;
-                return new TermQuery(new Index.Term(field, termStr));
-            }
-
-            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
-            {
-                type[0] = 3;
-                return base.GetFieldQuery(field, queryText, quoted);
-            }
-        }
-
-        [Test]
-        public override void TestStarParsing()
-        {
-            TestStarParsingQueryParser qp = new TestStarParsingQueryParser();
-
-            TermQuery tq;
-
-            tq = (TermQuery)qp.Parse("foo:zoo*");
-            assertEquals("zoo", tq.Term.Text());
-            assertEquals(2, qp.type[0]);
-
-            tq = (TermQuery)qp.Parse("foo:zoo*^2");
-            assertEquals("zoo", tq.Term.Text());
-            assertEquals(2, qp.type[0]);
-            assertEquals(tq.Boost, 2, 0);
-
-            tq = (TermQuery)qp.Parse("foo:*");
-            assertEquals("*", tq.Term.Text());
-            assertEquals(1, qp.type[0]); // could be a valid prefix query in the future too
-
-            tq = (TermQuery)qp.Parse("foo:*^2");
-            assertEquals("*", tq.Term.Text());
-            assertEquals(1, qp.type[0]);
-            assertEquals(tq.Boost, 2, 0);
-
-            tq = (TermQuery)qp.Parse("*:foo");
-            assertEquals("*", tq.Term.Field);
-            assertEquals("foo", tq.Term.Text());
-            assertEquals(3, qp.type[0]);
-
-            tq = (TermQuery)qp.Parse("*:*");
-            assertEquals("*", tq.Term.Field);
-            assertEquals("*", tq.Term.Text());
-            assertEquals(1, qp.type[0]); // could be handled as a prefix query in the
-            // future
-
-            tq = (TermQuery)qp.Parse("(*:*)");
-            assertEquals("*", tq.Term.Field);
-            assertEquals("*", tq.Term.Text());
-            assertEquals(1, qp.type[0]);
-        }
-
-        [Test]
-        public void TestCustomQueryParserWildcard()
-        {
-            try
-            {
-                new QPTestParser("contents", new MockAnalyzer(Random(),
-                    MockTokenizer.WHITESPACE, false)).Parse("a?t");
-                fail("Wildcard queries should not be allowed");
-            }
-            catch (ParseException expected)
-            {
-                // expected exception
-            }
-        }
-
-        [Test]
-        public void TestCustomQueryParserFuzzy()
-        {
-            try
-            {
-                new QPTestParser("contents", new MockAnalyzer(Random(),
-                    MockTokenizer.WHITESPACE, false)).Parse("xunit~");
-                fail("Fuzzy queries should not be allowed");
-            }
-            catch (ParseException expected)
-            {
-                // expected exception
-            }
-        }
-
-        /// <summary>
-        /// query parser that doesn't expand synonyms when users use double quotes
-        /// </summary>
-        private class SmartQueryParser : QueryParser
-        {
-            Analyzer morePrecise = new Analyzer2();
-
-            public SmartQueryParser()
-                : base(TEST_VERSION_CURRENT, "field", new Analyzer1())
-            {
-            }
-
-            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
-            {
-                if (quoted) return NewFieldQuery(morePrecise, field, queryText, quoted);
-                else return base.GetFieldQuery(field, queryText, quoted);
-            }
-        }
-
-        public override void TestNewFieldQuery()
-        {
-            /** ordinary behavior, synonyms form uncoordinated boolean query */
-            QueryParser dumb = new QueryParser(TEST_VERSION_CURRENT, "field",
-                new Analyzer1());
-            BooleanQuery expanded = new BooleanQuery(true);
-            expanded.Add(new TermQuery(new Index.Term("field", "dogs")),
-                BooleanClause.Occur.SHOULD);
-            expanded.Add(new TermQuery(new Index.Term("field", "dog")),
-                BooleanClause.Occur.SHOULD);
-            assertEquals(expanded, dumb.Parse("\"dogs\""));
-            /** even with the phrase operator the behavior is the same */
-            assertEquals(expanded, dumb.Parse("dogs"));
-
-            /**
-             * custom behavior, the synonyms are expanded, unless you use quote operator
-             */
-            QueryParser smart = new SmartQueryParser();
-            assertEquals(expanded, smart.Parse("dogs"));
-
-            Query unexpanded = new TermQuery(new Index.Term("field", "dogs"));
-            assertEquals(unexpanded, smart.Parse("\"dogs\""));
-        }
-
-        // LUCENETODO: fold these into QueryParserTestBase
-
-        /// <summary>
-        /// adds synonym of "dog" for "dogs".
-        /// </summary>
-        public class MockSynonymAnalyzer : Analyzer
-        {
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                MockTokenizer tokenizer = new MockTokenizer(reader);
-                return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
-            }
-        }
-
-        /// <summary>
-        /// simple synonyms test
-        /// </summary>
-        [Test]
-        public void TestSynonyms()
-        {
-            BooleanQuery expected = new BooleanQuery(true);
-            expected.Add(new TermQuery(new Index.Term("field", "dogs")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Index.Term("field", "dog")), BooleanClause.Occur.SHOULD);
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockSynonymAnalyzer());
-            assertEquals(expected, qp.Parse("dogs"));
-            assertEquals(expected, qp.Parse("\"dogs\""));
-            qp.DefaultOperator = (QueryParserBase.Operator.AND);
-            assertEquals(expected, qp.Parse("dogs"));
-            assertEquals(expected, qp.Parse("\"dogs\""));
-            expected.Boost = (2.0f);
-            assertEquals(expected, qp.Parse("dogs^2"));
-            assertEquals(expected, qp.Parse("\"dogs\"^2"));
-        }
-
-        /// <summary>
-        /// forms multiphrase query
-        /// </summary>
-        [Test]
-        public void TestSynonymsPhrase()
-        {
-            MultiPhraseQuery expected = new MultiPhraseQuery();
-            expected.Add(new Index.Term("field", "old"));
-            expected.Add(new Index.Term[] { new Index.Term("field", "dogs"), new Index.Term("field", "dog") });
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockSynonymAnalyzer());
-            assertEquals(expected, qp.Parse("\"old dogs\""));
-            qp.DefaultOperator = (QueryParserBase.Operator.AND);
-            assertEquals(expected, qp.Parse("\"old dogs\""));
-            expected.Boost = (2.0f);
-            assertEquals(expected, qp.Parse("\"old dogs\"^2"));
-            expected.Slop = (3);
-            assertEquals(expected, qp.Parse("\"old dogs\"~3^2"));
-        }
-
-        /// <summary>
-        /// adds synonym of "\u570b" for "\u56fd".
-        /// </summary>
-        protected internal class MockCJKSynonymFilter : TokenFilter
-        {
-            internal ICharTermAttribute TermAtt;
-            internal IPositionIncrementAttribute PosIncAtt;
-            internal bool AddSynonym = false;
-
-            public MockCJKSynonymFilter(TokenStream input)
-                : base(input)
-            {
-                TermAtt = AddAttribute<ICharTermAttribute>();
-                PosIncAtt = AddAttribute<IPositionIncrementAttribute>();
-            }
-
-            public sealed override bool IncrementToken()
-            {
-                if (AddSynonym) // inject our synonym
-                {
-                    ClearAttributes();
-                    TermAtt.SetEmpty().Append("\u570b");
-                    PosIncAtt.PositionIncrement = 0;
-                    AddSynonym = false;
-                    return true;
-                }
-
-                if (input.IncrementToken())
-                {
-                    AddSynonym = TermAtt.ToString().Equals("\u56fd");
-                    return true;
-                }
-                else
-                {
-                    return false;
-                }
-            }
-        }
-
-        protected class MockCJKSynonymAnalyzer : Analyzer
-        {
-            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
-            {
-                Tokenizer tokenizer = new SimpleCJKTokenizer(reader);
-                return new TokenStreamComponents(tokenizer, new MockCJKSynonymFilter(tokenizer));
-            }
-        }
-
-        /// <summary>
-        /// simple CJK synonym test
-        /// </summary>
-        [Test]
-        public void TestCJKSynonym()
-        {
-            BooleanQuery expected = new BooleanQuery(true);
-            expected.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
-            assertEquals(expected, qp.Parse("\u56fd"));
-            qp.DefaultOperator = (QueryParserBase.Operator.AND);
-            assertEquals(expected, qp.Parse("\u56fd"));
-            expected.Boost = (2.0f);
-            assertEquals(expected, qp.Parse("\u56fd^2"));
-        }
-
-        /// <summary>
-        /// synonyms with default OR operator 
-        /// </summary>
-        [Test]
-        public void TestCJKSynonymsOR()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
-            BooleanQuery inner = new BooleanQuery(true);
-            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner, BooleanClause.Occur.SHOULD);
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
-            assertEquals(expected, qp.Parse("\u4e2d\u56fd"));
-            expected.Boost = (2.0f);
-            assertEquals(expected, qp.Parse("\u4e2d\u56fd^2"));
-        }
-
-        /// <summary>
-        /// more complex synonyms with default OR operator
-        /// </summary>
-        [Test]
-        public void TestCJKSynonymsOR2()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
-            BooleanQuery inner = new BooleanQuery(true);
-            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner, BooleanClause.Occur.SHOULD);
-            BooleanQuery inner2 = new BooleanQuery(true);
-            inner2.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-            inner2.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner2, BooleanClause.Occur.SHOULD);
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
-            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd"));
-            expected.Boost = (2.0f);
-            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd^2"));
-        }
-
-        /// <summary>
-        /// synonyms with default AND operator
-        /// </summary>
-        [Test]
-        public void TestCJKSynonymsAND()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.MUST);
-            BooleanQuery inner = new BooleanQuery(true);
-            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner, BooleanClause.Occur.MUST);
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
-            qp.DefaultOperator = (QueryParserBase.Operator.AND);
-            assertEquals(expected, qp.Parse("\u4e2d\u56fd"));
-            expected.Boost = (2.0f);
-            assertEquals(expected, qp.Parse("\u4e2d\u56fd^2"));
-        }
-
-        /// <summary>
-        /// more complex synonyms with default AND operator
-        /// </summary>
-        [Test]
-        public void TestCJKSynonymsAND2()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.MUST);
-            BooleanQuery inner = new BooleanQuery(true);
-            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner, BooleanClause.Occur.MUST);
-            BooleanQuery inner2 = new BooleanQuery(true);
-            inner2.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
-            inner2.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner2, BooleanClause.Occur.MUST);
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
-            qp.DefaultOperator = (QueryParserBase.Operator.AND);
-            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd"));
-            expected.Boost = (2.0f);
-            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd^2"));
-        }
-
-        [Test]
-        public void TestCJKSynonymsPhrase()
-        {
-            MultiPhraseQuery expected = new MultiPhraseQuery();
-            expected.Add(new Index.Term("field", "\u4e2d"));
-            expected.Add(new Index.Term[] { new Index.Term("field", "\u56fd"), new Index.Term("field", "\u570b") });
-            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
-            qp.DefaultOperator = (QueryParserBase.Operator.AND);
-            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\""));
-            expected.Boost = (2.0f);
-            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\"^2"));
-            expected.Slop = (3);
-            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\"~3^2"));
-        }
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs b/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
deleted file mode 100644
index 2c2d6e2..0000000
--- a/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
+++ /dev/null
@@ -1,214 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Documents;
-using Lucene.Net.Index;
-using Lucene.Net.Search;
-using Lucene.Net.Store;
-using Lucene.Net.Util;
-using NUnit.Framework;
-using System;
-using System.Collections.Generic;
-
-namespace Lucene.Net.QueryParser.ComplexPhrase
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class TestComplexPhraseQuery : LuceneTestCase
-    {
-        Directory rd;
-        Analyzer analyzer;
-        DocData[] docsContent = {
-            new DocData("john smith", "1", "developer"),
-            new DocData("johathon smith", "2", "developer"),
-            new DocData("john percival smith", "3", "designer"),
-            new DocData("jackson waits tom", "4", "project manager")
-        };
-
-        private IndexSearcher searcher;
-        private IndexReader reader;
-
-        string defaultFieldName = "name";
-
-        bool inOrder = true;
-
-        [Test]
-        public void TestComplexPhrases()
-        {
-            CheckMatches("\"john smith\"", "1"); // Simple multi-term still works
-            CheckMatches("\"j*   smyth~\"", "1,2"); // wildcards and fuzzies are OK in
-            // phrases
-            CheckMatches("\"(jo* -john)  smith\"", "2"); // boolean logic works
-            CheckMatches("\"jo*  smith\"~2", "1,2,3"); // position logic works.
-            CheckMatches("\"jo* [sma TO smZ]\" ", "1,2"); // range queries supported
-            CheckMatches("\"john\"", "1,3"); // Simple single-term still works
-            CheckMatches("\"(john OR johathon)  smith\"", "1,2"); // boolean logic with
-            // brackets works.
-            CheckMatches("\"(jo* -john) smyth~\"", "2"); // boolean logic with
-            // brackets works.
-
-            // CheckMatches("\"john -percival\"", "1"); // not logic doesn't work
-            // currently :(.
-
-            CheckMatches("\"john  nosuchword*\"", ""); // phrases with clauses producing
-            // empty sets
-
-            CheckBadQuery("\"jo*  id:1 smith\""); // mixing fields in a phrase is bad
-            CheckBadQuery("\"jo* \"smith\" \""); // phrases inside phrases is bad
-        }
-
-        [Test]
-        public void TestUnOrderedProximitySearches()
-        {
-            inOrder = true;
-            CheckMatches("\"smith jo*\"~2", ""); // ordered proximity produces empty set
-
-            inOrder = false;
-            CheckMatches("\"smith jo*\"~2", "1,2,3"); // un-ordered proximity
-        }
-
-        private void CheckBadQuery(String qString)
-        {
-            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
-            qp.InOrder = inOrder;
-            Exception expected = null;
-            try
-            {
-                qp.Parse(qString);
-            }
-            catch (Exception e)
-            {
-                expected = e;
-            }
-            assertNotNull("Expected parse error in " + qString, expected);
-        }
-
-        private void CheckMatches(string qString, string expectedVals)
-        {
-            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
-            qp.InOrder = inOrder;
-            qp.FuzzyPrefixLength = 1; // usually a good idea
-
-            Query q = qp.Parse(qString);
-
-            HashSet<string> expecteds = new HashSet<string>();
-            string[] vals = expectedVals.Split(new char[] {','}, StringSplitOptions.RemoveEmptyEntries);
-            for (int i = 0; i < vals.Length; i++)
-            {
-                if (vals[i].Length > 0)
-                    expecteds.Add(vals[i]);
-            }
-
-            TopDocs td = searcher.Search(q, 10);
-            ScoreDoc[] sd = td.ScoreDocs;
-            for (int i = 0; i < sd.Length; i++)
-            {
-                Document doc = searcher.Doc(sd[i].Doc);
-                string id = doc.Get("id");
-                assertTrue(qString + "matched doc#" + id + " not expected", expecteds
-                    .Contains(id));
-                expecteds.Remove(id);
-            }
-
-            assertEquals(qString + " missing some matches ", 0, expecteds.Count);
-        }
-
-        [Test]
-        public void TestFieldedQuery()
-        {
-            CheckMatches("name:\"john smith\"", "1");
-            CheckMatches("name:\"j*   smyth~\"", "1,2");
-            CheckMatches("role:\"developer\"", "1,2");
-            CheckMatches("role:\"p* manager\"", "4");
-            CheckMatches("role:de*", "1,2,3");
-            CheckMatches("name:\"j* smyth~\"~5", "1,2,3");
-            CheckMatches("role:\"p* manager\" AND name:jack*", "4");
-            CheckMatches("+role:developer +name:jack*", "");
-            CheckMatches("name:\"john smith\"~2 AND role:designer AND id:3", "3");
-        }
-
-        [Test]
-        public void TestHashcodeEquals()
-        {
-            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
-            qp.InOrder = true;
-            qp.FuzzyPrefixLength = 1;
-
-            String qString = "\"aaa* bbb*\"";
-
-            Query q = qp.Parse(qString);
-            Query q2 = qp.Parse(qString);
-
-            assertEquals(q.GetHashCode(), q2.GetHashCode());
-            assertEquals(q, q2);
-
-            qp.InOrder = (false); // SOLR-6011
-
-            q2 = qp.Parse(qString);
-
-            // although the general contract of hashCode can't guarantee different values, if we only change one thing
-            // about a single query, it normally should result in a different value (and will with the current
-            // implementation in ComplexPhraseQuery)
-            assertTrue(q.GetHashCode() != q2.GetHashCode());
-            assertTrue(!q.equals(q2));
-            assertTrue(!q2.equals(q));
-        }
-
-        public override void SetUp()
-        {
-            base.SetUp();
-
-            analyzer = new MockAnalyzer(Random());
-            rd = NewDirectory();
-            using (IndexWriter w = new IndexWriter(rd, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)))
-            {
-                for (int i = 0; i < docsContent.Length; i++)
-                {
-                    Document doc = new Document();
-                    doc.Add(NewTextField("name", docsContent[i].Name, Field.Store.YES));
-                    doc.Add(NewTextField("id", docsContent[i].Id, Field.Store.YES));
-                    doc.Add(NewTextField("role", docsContent[i].Role, Field.Store.YES));
-                    w.AddDocument(doc);
-                }
-            }
-            reader = DirectoryReader.Open(rd);
-            searcher = NewSearcher(reader);
-        }
-
-        public override void TearDown()
-        {
-            reader.Dispose();
-            rd.Dispose();
-            base.TearDown();
-        }
-
-
-        private class DocData
-        {
-            public DocData(string name, string id, string role)
-            {
-                this.Name = name;
-                this.Id = id;
-                this.Role = role;
-            }
-
-            public string Name { get; private set; }
-            public string Id { get; private set; }
-            public string Role { get; private set; }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs b/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
deleted file mode 100644
index cbef5d8..0000000
--- a/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
+++ /dev/null
@@ -1,30 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Search;
-
-namespace Lucene.Net.QueryParser.Ext
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    internal class ExtensionStub : ParserExtension
-    {
-        public override Query Parse(ExtensionQuery components)
-        {
-            return new TermQuery(new Term(components.Field, components.RawQueryString));
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs b/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
deleted file mode 100644
index 7e2e99e..0000000
--- a/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
+++ /dev/null
@@ -1,145 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.QueryParser.Classic;
-using Lucene.Net.Search;
-using NUnit.Framework;
-using System.Globalization;
-
-namespace Lucene.Net.QueryParser.Ext
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Testcase for the class <see cref="ExtendableQueryParser"/>
-    /// </summary>
-    [TestFixture]
-    public class TestExtendableQueryParser : TestQueryParser
-    {
-        private static char[] DELIMITERS = new char[] {
-            Extensions.DEFAULT_EXTENSION_FIELD_DELIMITER, '-', '|' };
-
-        public override Classic.QueryParser GetParser(Analyzer a)
-        {
-            return GetParser(a, null);
-        }
-
-        public Classic.QueryParser GetParser(Analyzer a, Extensions extensions)
-        {
-            if (a == null)
-                a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
-            Classic.QueryParser qp = extensions == null ? new ExtendableQueryParser(
-                TEST_VERSION_CURRENT, DefaultField, a) : new ExtendableQueryParser(
-                TEST_VERSION_CURRENT, DefaultField, a, extensions);
-            qp.DefaultOperator = QueryParserBase.OR_OPERATOR;
-            return qp;
-        }
-
-        [Test]
-        public void TestUnescapedExtDelimiter()
-        {
-            Extensions ext = NewExtensions(':');
-            ext.Add("testExt", new ExtensionStub());
-            ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null, ext);
-            try
-            {
-                parser.Parse("aField:testExt:\"foo \\& bar\"");
-                fail("extension field delimiter is not escaped");
-            }
-            catch (ParseException e)
-            {
-            }
-        }
-
-        [Test]
-        public void TestExtFieldUnqoted()
-        {
-            for (int i = 0; i < DELIMITERS.Length; i++)
-            {
-                Extensions ext = NewExtensions(DELIMITERS[i]);
-                ext.Add("testExt", new ExtensionStub());
-                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
-                    ext);
-                string field = ext.BuildExtensionField("testExt", "aField");
-                Query query = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:foo bar", field));
-                assertTrue("expected instance of BooleanQuery but was "
-                    + query.GetType(), query is BooleanQuery);
-                BooleanQuery bquery = (BooleanQuery)query;
-                BooleanClause[] clauses = bquery.Clauses;
-                assertEquals(2, clauses.Length);
-                BooleanClause booleanClause = clauses[0];
-                query = booleanClause.Query;
-                assertTrue("expected instance of TermQuery but was " + query.GetType(),
-                    query is TermQuery);
-                TermQuery tquery = (TermQuery)query;
-                assertEquals("aField", tquery.Term
-                    .Field);
-                assertEquals("foo", tquery.Term.Text());
-
-                booleanClause = clauses[1];
-                query = booleanClause.Query;
-                assertTrue("expected instance of TermQuery but was " + query.GetType(),
-                    query is TermQuery);
-                tquery = (TermQuery)query;
-                assertEquals(DefaultField, tquery.Term.Field);
-                assertEquals("bar", tquery.Term.Text());
-            }
-        }
-
-        [Test]
-        public void TestExtDefaultField()
-        {
-            for (int i = 0; i < DELIMITERS.Length; i++)
-            {
-                Extensions ext = NewExtensions(DELIMITERS[i]);
-                ext.Add("testExt", new ExtensionStub());
-                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
-                    ext);
-                string field = ext.BuildExtensionField("testExt");
-                Query parse = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:\"foo \\& bar\"", field));
-                assertTrue("expected instance of TermQuery but was " + parse.GetType(),
-                    parse is TermQuery);
-                TermQuery tquery = (TermQuery)parse;
-                assertEquals(DefaultField, tquery.Term.Field);
-                assertEquals("foo & bar", tquery.Term.Text());
-            }
-        }
-
-        public Extensions NewExtensions(char delimiter)
-        {
-            return new Extensions(delimiter);
-        }
-
-        [Test]
-        public void TestExtField()
-        {
-            for (int i = 0; i < DELIMITERS.Length; i++)
-            {
-                Extensions ext = NewExtensions(DELIMITERS[i]);
-                ext.Add("testExt", new ExtensionStub());
-                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
-                    ext);
-                string field = ext.BuildExtensionField("testExt", "afield");
-                Query parse = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:\"foo \\& bar\"", field));
-                assertTrue("expected instance of TermQuery but was " + parse.GetType(),
-                    parse is TermQuery);
-                TermQuery tquery = (TermQuery)parse;
-                assertEquals("afield", tquery.Term.Field);
-                assertEquals("foo & bar", tquery.Term.Text());
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs b/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
deleted file mode 100644
index 4850987..0000000
--- a/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
+++ /dev/null
@@ -1,97 +0,0 @@
-\ufeffusing Lucene.Net.Util;
-using NUnit.Framework;
-using System;
-
-namespace Lucene.Net.QueryParser.Ext
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Testcase for the <see cref="Extensions"/> class
-    /// </summary>
-    [TestFixture]
-    public class TestExtensions : LuceneTestCase
-    {
-        private Extensions ext;
-
-        public override void SetUp()
-        {
-            base.SetUp();
-            this.ext = new Extensions();
-        }
-
-        [Test]
-        public void TestBuildExtensionField()
-        {
-            assertEquals("field\\:key", ext.BuildExtensionField("key", "field"));
-            assertEquals("\\:key", ext.BuildExtensionField("key"));
-
-            ext = new Extensions('.');
-            assertEquals("field.key", ext.BuildExtensionField("key", "field"));
-            assertEquals(".key", ext.BuildExtensionField("key"));
-        }
-
-        [Test]
-        public void TestSplitExtensionField()
-        {
-            assertEquals("field\\:key", ext.BuildExtensionField("key", "field"));
-            assertEquals("\\:key", ext.BuildExtensionField("key"));
-            
-            ext = new Extensions('.');
-            assertEquals("field.key", ext.BuildExtensionField("key", "field"));
-            assertEquals(".key", ext.BuildExtensionField("key"));
-        }
-
-        [Test]
-        public void TestAddGetExtension()
-        {
-            ParserExtension extension = new ExtensionStub();
-            assertNull(ext.GetExtension("foo"));
-            ext.Add("foo", extension);
-            Assert.AreSame(extension, ext.GetExtension("foo"));
-            ext.Add("foo", null);
-            assertNull(ext.GetExtension("foo"));
-        }
-
-        [Test]
-        public void TestGetExtDelimiter()
-        {
-            assertEquals(Extensions.DEFAULT_EXTENSION_FIELD_DELIMITER, this.ext
-                .ExtensionFieldDelimiter);
-            ext = new Extensions('?');
-            assertEquals('?', this.ext.ExtensionFieldDelimiter);
-        }
-
-        [Test]
-        public void TestEscapeExtension()
-        {
-            assertEquals("abc\\:\\?\\{\\}\\[\\]\\\\\\(\\)\\+\\-\\!\\~", ext
-                .EscapeExtensionField("abc:?{}[]\\()+-!~"));
-            try
-            {
-                ext.EscapeExtensionField(null);
-                fail("should throw NPE - escape string is null");
-            }
-            //catch (NullPointerException e)
-            catch (Exception e)
-            {
-                // 
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
deleted file mode 100644
index 2094270..0000000
--- a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
+++ /dev/null
@@ -1,95 +0,0 @@
-\ufeff<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
-  <PropertyGroup>
-    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
-    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
-    <ProjectGuid>{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}</ProjectGuid>
-    <OutputType>Library</OutputType>
-    <AppDesignerFolder>Properties</AppDesignerFolder>
-    <RootNamespace>Lucene.Net.Tests.QueryParser</RootNamespace>
-    <AssemblyName>Lucene.Net.Tests.QueryParser</AssemblyName>
-    <TargetFrameworkVersion>v4.5.1</TargetFrameworkVersion>
-    <FileAlignment>512</FileAlignment>
-  </PropertyGroup>
-  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
-    <DebugSymbols>true</DebugSymbols>
-    <DebugType>full</DebugType>
-    <Optimize>false</Optimize>
-    <OutputPath>bin\Debug\</OutputPath>
-    <DefineConstants>DEBUG;TRACE</DefineConstants>
-    <ErrorReport>prompt</ErrorReport>
-    <WarningLevel>4</WarningLevel>
-  </PropertyGroup>
-  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
-    <DebugType>pdbonly</DebugType>
-    <Optimize>true</Optimize>
-    <OutputPath>bin\Release\</OutputPath>
-    <DefineConstants>TRACE</DefineConstants>
-    <ErrorReport>prompt</ErrorReport>
-    <WarningLevel>4</WarningLevel>
-  </PropertyGroup>
-  <ItemGroup>
-    <Reference Include="nunit.framework, Version=2.6.3.13283, Culture=neutral, PublicKeyToken=96d09a1eb7f44a77, processorArchitecture=MSIL">
-      <HintPath>..\packages\NUnit.2.6.3\lib\nunit.framework.dll</HintPath>
-      <Private>True</Private>
-    </Reference>
-    <Reference Include="System" />
-    <Reference Include="System.Core" />
-    <Reference Include="System.Xml.Linq" />
-    <Reference Include="System.Data.DataSetExtensions" />
-    <Reference Include="Microsoft.CSharp" />
-    <Reference Include="System.Data" />
-    <Reference Include="System.Xml" />
-  </ItemGroup>
-  <ItemGroup>
-    <Compile Include="Analyzing\TestAnalyzingQueryParser.cs" />
-    <Compile Include="Classic\TestMultiFieldQueryParser.cs" />
-    <Compile Include="Classic\TestMultiPhraseQueryParsing.cs" />
-    <Compile Include="Classic\TestQueryParser.cs" />
-    <Compile Include="ComplexPhrase\TestComplexPhraseQuery.cs" />
-    <Compile Include="Ext\ExtensionStub.cs" />
-    <Compile Include="Ext\TestExtendableQueryParser.cs" />
-    <Compile Include="Ext\TestExtensions.cs" />
-    <Compile Include="Properties\AssemblyInfo.cs" />
-    <Compile Include="Classic\TestMultiAnalyzer.cs" />
-    <Compile Include="Simple\TestSimpleQueryParser.cs" />
-    <Compile Include="Surround\Query\BooleanQueryTst.cs" />
-    <Compile Include="Surround\Query\ExceptionQueryTst.cs" />
-    <Compile Include="Surround\Query\SingleFieldTestDb.cs" />
-    <Compile Include="Surround\Query\SrndQueryTest.cs" />
-    <Compile Include="Surround\Query\Test01Exceptions.cs" />
-    <Compile Include="Surround\Query\Test02Boolean.cs" />
-    <Compile Include="Surround\Query\Test03Distance.cs" />
-    <Compile Include="Util\QueryParserTestBase.cs" />
-  </ItemGroup>
-  <ItemGroup>
-    <None Include="packages.config" />
-  </ItemGroup>
-  <ItemGroup>
-    <ProjectReference Include="..\Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj">
-      <Project>{949ba34b-6ae6-4ce3-b578-61e13e4d76bf}</Project>
-      <Name>Lucene.Net.QueryParser</Name>
-    </ProjectReference>
-    <ProjectReference Include="..\src\Lucene.Net.Analysis.Common\Lucene.Net.Analysis.Common.csproj">
-      <Project>{4add0bbc-b900-4715-9526-d871de8eea64}</Project>
-      <Name>Lucene.Net.Analysis.Common</Name>
-    </ProjectReference>
-    <ProjectReference Include="..\src\Lucene.Net.Core\Lucene.Net.csproj">
-      <Project>{5d4ad9be-1ffb-41ab-9943-25737971bf57}</Project>
-      <Name>Lucene.Net</Name>
-    </ProjectReference>
-    <ProjectReference Include="..\src\Lucene.Net.TestFramework\Lucene.Net.TestFramework.csproj">
-      <Project>{b2c0d749-ce34-4f62-a15e-00cb2ff5ddb3}</Project>
-      <Name>Lucene.Net.TestFramework</Name>
-    </ProjectReference>
-  </ItemGroup>
-  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
-  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
-       Other similar extension points exist, see Microsoft.Common.targets.
-  <Target Name="BeforeBuild">
-  </Target>
-  <Target Name="AfterBuild">
-  </Target>
-  -->
-</Project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs b/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
deleted file mode 100644
index 549c7bf..0000000
--- a/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-\ufeffusing System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following 
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("Lucene.Net.Tests.QueryParser")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("")]
-[assembly: AssemblyProduct("Lucene.Net.Tests.QueryParser")]
-[assembly: AssemblyCopyright("Copyright �  2016")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Setting ComVisible to false makes the types in this assembly not visible 
-// to COM components.  If you need to access a type in this assembly from 
-// COM, set the ComVisible attribute to true on that type.
-[assembly: ComVisible(false)]
-
-// The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("27d0ae76-3e51-454c-9c4a-f913fde0ed0a")]
-
-// Version information for an assembly consists of the following four values:
-//
-//      Major Version
-//      Minor Version 
-//      Build Number
-//      Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers 
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]


[12/50] [abbrv] lucenenet git commit: Ported QueryParser.ComplexPhrase namespace + tests.

Posted by sy...@apache.org.
Ported QueryParser.ComplexPhrase namespace + tests.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/071b60ce
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/071b60ce
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/071b60ce

Branch: refs/heads/master
Commit: 071b60ce871c174f356f65c1e6e96eb2f604b434
Parents: 11d7449
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Aug 1 03:03:29 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:25 2016 +0700

----------------------------------------------------------------------
 .../Classic/QueryParserBase.cs                  |   2 +-
 .../ComplexPhrase/ComplexPhraseQueryParser.cs   | 468 +++++++++++++++++++
 .../Lucene.Net.QueryParser.csproj               |   1 +
 .../ComplexPhrase/TestComplexPhraseQuery.cs     | 214 +++++++++
 .../Lucene.Net.Tests.QueryParser.csproj         |   1 +
 5 files changed, 685 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/071b60ce/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
index 0449187..599110e 100644
--- a/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
+++ b/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
@@ -164,7 +164,7 @@ namespace Lucene.Net.QueryParser.Classic
         /// </remarks>
         /// <param name="query">the query string to be parsed.</param>
         /// <returns></returns>
-        public Query Parse(string query)
+        public virtual Query Parse(string query)
         {
             ReInit(new FastCharStream(new StringReader(query)));
             try

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/071b60ce/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs b/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
new file mode 100644
index 0000000..0ac7c5b
--- /dev/null
+++ b/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
@@ -0,0 +1,468 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+using Lucene.Net.Search.Spans;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.ComplexPhrase
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// QueryParser which permits complex phrase query syntax eg "(john jon
+    /// jonathan~) peters*".
+    /// <p>
+    /// Performs potentially multiple passes over Query text to parse any nested
+    /// logic in PhraseQueries. - First pass takes any PhraseQuery content between
+    /// quotes and stores for subsequent pass. All other query content is parsed as
+    /// normal - Second pass parses any stored PhraseQuery content, checking all
+    /// embedded clauses are referring to the same field and therefore can be
+    /// rewritten as Span queries. All PhraseQuery clauses are expressed as
+    /// ComplexPhraseQuery objects
+    /// </p>
+    /// <p>
+    /// This could arguably be done in one pass using a new QueryParser but here I am
+    /// working within the constraints of the existing parser as a base class. This
+    /// currently simply feeds all phrase content through an analyzer to select
+    /// phrase terms - any "special" syntax such as * ~ * etc are not given special
+    /// status
+    /// </p>
+    /// </summary>
+    public class ComplexPhraseQueryParser : Classic.QueryParser
+    {
+        private List<ComplexPhraseQuery> complexPhrases = null;
+
+        private bool isPass2ResolvingPhrases;
+
+        /// <summary>
+        /// When <code>inOrder</code> is true, the search terms must
+        /// exists in the documents as the same order as in query.
+        /// Choose between ordered (true) or un-ordered (false) proximity search.
+        /// </summary>
+        public bool InOrder { get; set; }
+
+        private ComplexPhraseQuery currentPhraseQuery = null;
+
+        public ComplexPhraseQueryParser(LuceneVersion matchVersion, string f, Analyzer a)
+            : base(matchVersion, f, a)
+        {
+            // set property defaults
+            this.InOrder = true;
+        }
+
+        protected internal override Query GetFieldQuery(string field, string queryText, int slop)
+        {
+            ComplexPhraseQuery cpq = new ComplexPhraseQuery(field, queryText, slop, InOrder);
+            complexPhrases.Add(cpq); // add to list of phrases to be parsed once
+            // we
+            // are through with this pass
+            return cpq;
+        }
+
+        public override Query Parse(string query)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                MultiTermQuery.RewriteMethod oldMethod = MultiTermRewriteMethod;
+                try
+                {
+                    // Temporarily force BooleanQuery rewrite so that Parser will
+                    // generate visible
+                    // collection of terms which we can convert into SpanQueries.
+                    // ConstantScoreRewrite mode produces an
+                    // opaque ConstantScoreQuery object which cannot be interrogated for
+                    // terms in the same way a BooleanQuery can.
+                    // QueryParser is not guaranteed threadsafe anyway so this temporary
+                    // state change should not
+                    // present an issue
+                    MultiTermRewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
+                    return base.Parse(query);
+                }
+                finally
+                {
+                    MultiTermRewriteMethod = oldMethod;
+                }
+            }
+
+            // First pass - parse the top-level query recording any PhraseQuerys
+            // which will need to be resolved
+            complexPhrases = new List<ComplexPhraseQuery>();
+            Query q = base.Parse(query);
+
+            // Perform second pass, using this QueryParser to parse any nested
+            // PhraseQueries with different
+            // set of syntax restrictions (i.e. all fields must be same)
+            isPass2ResolvingPhrases = true;
+            try
+            {
+                foreach (var currentPhraseQuery in complexPhrases)
+                {
+                    this.currentPhraseQuery = currentPhraseQuery;
+                    // in each phrase, now parse the contents between quotes as a
+                    // separate parse operation
+                    currentPhraseQuery.ParsePhraseElements(this);
+                }
+            }
+            finally
+            {
+                isPass2ResolvingPhrases = false;
+            }
+            return q;
+        }
+
+        // There is No "getTermQuery throws ParseException" method to override so
+        // unfortunately need
+        // to throw a runtime exception here if a term for another field is embedded
+        // in phrase query
+        protected override Query NewTermQuery(Term term)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                try
+                {
+                    CheckPhraseClauseIsForSameField(term.Field);
+                }
+                catch (ParseException pe)
+                {
+                    throw new Exception("Error parsing complex phrase", pe);
+                }
+            }
+            return base.NewTermQuery(term);
+        }
+
+        // Helper method used to report on any clauses that appear in query syntax
+        private void CheckPhraseClauseIsForSameField(string field)
+        {
+            if (!field.Equals(currentPhraseQuery.Field))
+            {
+                throw new ParseException("Cannot have clause for field \"" + field
+                    + "\" nested in phrase " + " for field \"" + currentPhraseQuery.Field
+                    + "\"");
+            }
+        }
+
+        protected internal override Query GetWildcardQuery(string field, string termStr)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                CheckPhraseClauseIsForSameField(field);
+            }
+            return base.GetWildcardQuery(field, termStr);
+        }
+
+        protected internal override Query GetRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                CheckPhraseClauseIsForSameField(field);
+            }
+            return base.GetRangeQuery(field, part1, part2, startInclusive, endInclusive);
+        }
+
+        protected internal override Query NewRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                // Must use old-style RangeQuery in order to produce a BooleanQuery
+                // that can be turned into SpanOr clause
+                TermRangeQuery rangeQuery = TermRangeQuery.NewStringRange(field, part1, part2, startInclusive, endInclusive);
+                rangeQuery.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+                return rangeQuery;
+            }
+            return base.NewRangeQuery(field, part1, part2, startInclusive, endInclusive);
+        }
+
+        protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                CheckPhraseClauseIsForSameField(field);
+            }
+            return base.GetFuzzyQuery(field, termStr, minSimilarity);
+        }
+
+        /// <summary>
+        /// Used to handle the query content in between quotes and produced Span-based
+        /// interpretations of the clauses.
+        /// </summary>
+        public class ComplexPhraseQuery : Query
+        {
+            private readonly string field;
+            private readonly string phrasedQueryStringContents;
+            private readonly int slopFactor;
+            private readonly bool inOrder;
+            private Query contents;
+
+            public ComplexPhraseQuery(string field, string phrasedQueryStringContents,
+                int slopFactor, bool inOrder)
+            {
+                this.field = field;
+                this.phrasedQueryStringContents = phrasedQueryStringContents;
+                this.slopFactor = slopFactor;
+                this.inOrder = inOrder;
+            }
+
+            public string Field
+            {
+                get { return field; }
+            }
+
+            // Called by ComplexPhraseQueryParser for each phrase after the main
+            // parse
+            // thread is through
+            protected internal void ParsePhraseElements(ComplexPhraseQueryParser qp)
+            {
+                // TODO ensure that field-sensitivity is preserved ie the query
+                // string below is parsed as
+                // field+":("+phrasedQueryStringContents+")"
+                // but this will need code in rewrite to unwrap the first layer of
+                // boolean query
+
+                string oldDefaultParserField = qp.Field;
+                try
+                {
+                    //temporarily set the QueryParser to be parsing the default field for this phrase e.g author:"fred* smith"
+                    qp.field = this.field;
+                    contents = qp.Parse(phrasedQueryStringContents);
+                }
+                finally
+                {
+                    qp.field = oldDefaultParserField;
+                }
+            }
+
+            public override Query Rewrite(IndexReader reader)
+            {
+                // ArrayList spanClauses = new ArrayList();
+                if (contents is TermQuery)
+                {
+                    return contents;
+                }
+                // Build a sequence of Span clauses arranged in a SpanNear - child
+                // clauses can be complex
+                // Booleans e.g. nots and ors etc
+                int numNegatives = 0;
+                if (!(contents is BooleanQuery))
+                {
+                    throw new ArgumentException("Unknown query type \""
+                        + contents.GetType().Name
+                        + "\" found in phrase query string \"" + phrasedQueryStringContents
+                        + "\"");
+                }
+                BooleanQuery bq = (BooleanQuery)contents;
+                BooleanClause[] bclauses = bq.Clauses;
+                SpanQuery[] allSpanClauses = new SpanQuery[bclauses.Length];
+                // For all clauses e.g. one* two~
+                for (int i = 0; i < bclauses.Length; i++)
+                {
+                    // HashSet bclauseterms=new HashSet();
+                    Query qc = bclauses[i].Query;
+                    // Rewrite this clause e.g one* becomes (one OR onerous)
+                    qc = qc.Rewrite(reader);
+                    if (bclauses[i].Occur_.Equals(BooleanClause.Occur.MUST_NOT))
+                    {
+                        numNegatives++;
+                    }
+
+                    if (qc is BooleanQuery)
+                    {
+                        List<SpanQuery> sc = new List<SpanQuery>();
+                        AddComplexPhraseClause(sc, (BooleanQuery)qc);
+                        if (sc.Count > 0)
+                        {
+                            allSpanClauses[i] = sc.ElementAt(0);
+                        }
+                        else
+                        {
+                            // Insert fake term e.g. phrase query was for "Fred Smithe*" and
+                            // there were no "Smithe*" terms - need to
+                            // prevent match on just "Fred".
+                            allSpanClauses[i] = new SpanTermQuery(new Term(field,
+                                "Dummy clause because no terms found - must match nothing"));
+                        }
+                    }
+                    else
+                    {
+                        if (qc is TermQuery)
+                        {
+                            TermQuery tq = (TermQuery)qc;
+                            allSpanClauses[i] = new SpanTermQuery(tq.Term);
+                        }
+                        else
+                        {
+                            throw new ArgumentException("Unknown query type \""
+                                + qc.GetType().Name
+                                + "\" found in phrase query string \""
+                                + phrasedQueryStringContents + "\"");
+                        }
+
+                    }
+                }
+                if (numNegatives == 0)
+                {
+                    // The simple case - no negative elements in phrase
+                    return new SpanNearQuery(allSpanClauses, slopFactor, inOrder);
+                }
+                // Complex case - we have mixed positives and negatives in the
+                // sequence.
+                // Need to return a SpanNotQuery
+                List<SpanQuery> positiveClauses = new List<SpanQuery>();
+                for (int j = 0; j < allSpanClauses.Length; j++)
+                {
+                    if (!bclauses[j].Occur_.Equals(BooleanClause.Occur.MUST_NOT))
+                    {
+                        positiveClauses.Add(allSpanClauses[j]);
+                    }
+                }
+
+                SpanQuery[] includeClauses = positiveClauses
+                    .ToArray();
+
+                SpanQuery include = null;
+                if (includeClauses.Length == 1)
+                {
+                    include = includeClauses[0]; // only one positive clause
+                }
+                else
+                {
+                    // need to increase slop factor based on gaps introduced by
+                    // negatives
+                    include = new SpanNearQuery(includeClauses, slopFactor + numNegatives,
+                        inOrder);
+                }
+                // Use sequence of positive and negative values as the exclude.
+                SpanNearQuery exclude = new SpanNearQuery(allSpanClauses, slopFactor,
+                    inOrder);
+                SpanNotQuery snot = new SpanNotQuery(include, exclude);
+                return snot;
+            }
+
+            private void AddComplexPhraseClause(List<SpanQuery> spanClauses, BooleanQuery qc)
+            {
+                List<SpanQuery> ors = new List<SpanQuery>();
+                List<SpanQuery> nots = new List<SpanQuery>();
+                BooleanClause[] bclauses = qc.Clauses;
+
+                // For all clauses e.g. one* two~
+                for (int i = 0; i < bclauses.Length; i++)
+                {
+                    Query childQuery = bclauses[i].Query;
+
+                    // select the list to which we will add these options
+                    List<SpanQuery> chosenList = ors;
+                    if (bclauses[i].Occur_ == BooleanClause.Occur.MUST_NOT)
+                    {
+                        chosenList = nots;
+                    }
+
+                    if (childQuery is TermQuery)
+                    {
+                        TermQuery tq = (TermQuery)childQuery;
+                        SpanTermQuery stq = new SpanTermQuery(tq.Term);
+                        stq.Boost = tq.Boost;
+                        chosenList.Add(stq);
+                    }
+                    else if (childQuery is BooleanQuery)
+                    {
+                        BooleanQuery cbq = (BooleanQuery)childQuery;
+                        AddComplexPhraseClause(chosenList, cbq);
+                    }
+                    else
+                    {
+                        // LUCENETODO alternatively could call extract terms here?
+                        throw new ArgumentException("Unknown query type:"
+                            + childQuery.GetType().Name);
+                    }
+                }
+                if (ors.Count == 0)
+                {
+                    return;
+                }
+                SpanOrQuery soq = new SpanOrQuery(ors
+                    .ToArray());
+                if (nots.Count == 0)
+                {
+                    spanClauses.Add(soq);
+                }
+                else
+                {
+                    SpanOrQuery snqs = new SpanOrQuery(nots
+                        .ToArray());
+                    SpanNotQuery snq = new SpanNotQuery(soq, snqs);
+                    spanClauses.Add(snq);
+                }
+            }
+
+            public override string ToString(string field)
+            {
+                return "\"" + phrasedQueryStringContents + "\"";
+            }
+
+            public override int GetHashCode()
+            {
+                int prime = 31;
+                int result = base.GetHashCode();
+                result = prime * result + ((field == null) ? 0 : field.GetHashCode());
+                result = prime
+                    * result
+                    + ((phrasedQueryStringContents == null) ? 0
+                        : phrasedQueryStringContents.GetHashCode());
+                result = prime * result + slopFactor;
+                result = prime * result + (inOrder ? 1 : 0);
+                return result;
+            }
+
+            public override bool Equals(object obj)
+            {
+                if (this == obj)
+                    return true;
+                if (obj == null)
+                    return false;
+                if (GetType() != obj.GetType())
+                    return false;
+                if (!base.Equals(obj))
+                {
+                    return false;
+                }
+                ComplexPhraseQuery other = (ComplexPhraseQuery)obj;
+                if (field == null)
+                {
+                    if (other.field != null)
+                        return false;
+                }
+                else if (!field.Equals(other.field))
+                    return false;
+                if (phrasedQueryStringContents == null)
+                {
+                    if (other.phrasedQueryStringContents != null)
+                        return false;
+                }
+                else if (!phrasedQueryStringContents
+                  .Equals(other.phrasedQueryStringContents))
+                    return false;
+                if (slopFactor != other.slopFactor)
+                    return false;
+                return inOrder == other.inOrder;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/071b60ce/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
index 2c0619c..0b18336 100644
--- a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
+++ b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
@@ -50,6 +50,7 @@
     <Compile Include="Classic\QueryParserTokenManager.cs" />
     <Compile Include="Classic\Token.cs" />
     <Compile Include="Classic\TokenMgrError.cs" />
+    <Compile Include="ComplexPhrase\ComplexPhraseQueryParser.cs" />
     <Compile Include="Flexible\Standard\CommonQueryParserConfiguration.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
   </ItemGroup>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/071b60ce/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs b/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
new file mode 100644
index 0000000..2c2d6e2
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
@@ -0,0 +1,214 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.ComplexPhrase
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestComplexPhraseQuery : LuceneTestCase
+    {
+        Directory rd;
+        Analyzer analyzer;
+        DocData[] docsContent = {
+            new DocData("john smith", "1", "developer"),
+            new DocData("johathon smith", "2", "developer"),
+            new DocData("john percival smith", "3", "designer"),
+            new DocData("jackson waits tom", "4", "project manager")
+        };
+
+        private IndexSearcher searcher;
+        private IndexReader reader;
+
+        string defaultFieldName = "name";
+
+        bool inOrder = true;
+
+        [Test]
+        public void TestComplexPhrases()
+        {
+            CheckMatches("\"john smith\"", "1"); // Simple multi-term still works
+            CheckMatches("\"j*   smyth~\"", "1,2"); // wildcards and fuzzies are OK in
+            // phrases
+            CheckMatches("\"(jo* -john)  smith\"", "2"); // boolean logic works
+            CheckMatches("\"jo*  smith\"~2", "1,2,3"); // position logic works.
+            CheckMatches("\"jo* [sma TO smZ]\" ", "1,2"); // range queries supported
+            CheckMatches("\"john\"", "1,3"); // Simple single-term still works
+            CheckMatches("\"(john OR johathon)  smith\"", "1,2"); // boolean logic with
+            // brackets works.
+            CheckMatches("\"(jo* -john) smyth~\"", "2"); // boolean logic with
+            // brackets works.
+
+            // CheckMatches("\"john -percival\"", "1"); // not logic doesn't work
+            // currently :(.
+
+            CheckMatches("\"john  nosuchword*\"", ""); // phrases with clauses producing
+            // empty sets
+
+            CheckBadQuery("\"jo*  id:1 smith\""); // mixing fields in a phrase is bad
+            CheckBadQuery("\"jo* \"smith\" \""); // phrases inside phrases is bad
+        }
+
+        [Test]
+        public void TestUnOrderedProximitySearches()
+        {
+            inOrder = true;
+            CheckMatches("\"smith jo*\"~2", ""); // ordered proximity produces empty set
+
+            inOrder = false;
+            CheckMatches("\"smith jo*\"~2", "1,2,3"); // un-ordered proximity
+        }
+
+        private void CheckBadQuery(String qString)
+        {
+            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
+            qp.InOrder = inOrder;
+            Exception expected = null;
+            try
+            {
+                qp.Parse(qString);
+            }
+            catch (Exception e)
+            {
+                expected = e;
+            }
+            assertNotNull("Expected parse error in " + qString, expected);
+        }
+
+        private void CheckMatches(string qString, string expectedVals)
+        {
+            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
+            qp.InOrder = inOrder;
+            qp.FuzzyPrefixLength = 1; // usually a good idea
+
+            Query q = qp.Parse(qString);
+
+            HashSet<string> expecteds = new HashSet<string>();
+            string[] vals = expectedVals.Split(new char[] {','}, StringSplitOptions.RemoveEmptyEntries);
+            for (int i = 0; i < vals.Length; i++)
+            {
+                if (vals[i].Length > 0)
+                    expecteds.Add(vals[i]);
+            }
+
+            TopDocs td = searcher.Search(q, 10);
+            ScoreDoc[] sd = td.ScoreDocs;
+            for (int i = 0; i < sd.Length; i++)
+            {
+                Document doc = searcher.Doc(sd[i].Doc);
+                string id = doc.Get("id");
+                assertTrue(qString + "matched doc#" + id + " not expected", expecteds
+                    .Contains(id));
+                expecteds.Remove(id);
+            }
+
+            assertEquals(qString + " missing some matches ", 0, expecteds.Count);
+        }
+
+        [Test]
+        public void TestFieldedQuery()
+        {
+            CheckMatches("name:\"john smith\"", "1");
+            CheckMatches("name:\"j*   smyth~\"", "1,2");
+            CheckMatches("role:\"developer\"", "1,2");
+            CheckMatches("role:\"p* manager\"", "4");
+            CheckMatches("role:de*", "1,2,3");
+            CheckMatches("name:\"j* smyth~\"~5", "1,2,3");
+            CheckMatches("role:\"p* manager\" AND name:jack*", "4");
+            CheckMatches("+role:developer +name:jack*", "");
+            CheckMatches("name:\"john smith\"~2 AND role:designer AND id:3", "3");
+        }
+
+        [Test]
+        public void TestHashcodeEquals()
+        {
+            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
+            qp.InOrder = true;
+            qp.FuzzyPrefixLength = 1;
+
+            String qString = "\"aaa* bbb*\"";
+
+            Query q = qp.Parse(qString);
+            Query q2 = qp.Parse(qString);
+
+            assertEquals(q.GetHashCode(), q2.GetHashCode());
+            assertEquals(q, q2);
+
+            qp.InOrder = (false); // SOLR-6011
+
+            q2 = qp.Parse(qString);
+
+            // although the general contract of hashCode can't guarantee different values, if we only change one thing
+            // about a single query, it normally should result in a different value (and will with the current
+            // implementation in ComplexPhraseQuery)
+            assertTrue(q.GetHashCode() != q2.GetHashCode());
+            assertTrue(!q.equals(q2));
+            assertTrue(!q2.equals(q));
+        }
+
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            analyzer = new MockAnalyzer(Random());
+            rd = NewDirectory();
+            using (IndexWriter w = new IndexWriter(rd, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)))
+            {
+                for (int i = 0; i < docsContent.Length; i++)
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("name", docsContent[i].Name, Field.Store.YES));
+                    doc.Add(NewTextField("id", docsContent[i].Id, Field.Store.YES));
+                    doc.Add(NewTextField("role", docsContent[i].Role, Field.Store.YES));
+                    w.AddDocument(doc);
+                }
+            }
+            reader = DirectoryReader.Open(rd);
+            searcher = NewSearcher(reader);
+        }
+
+        public override void TearDown()
+        {
+            reader.Dispose();
+            rd.Dispose();
+            base.TearDown();
+        }
+
+
+        private class DocData
+        {
+            public DocData(string name, string id, string role)
+            {
+                this.Name = name;
+                this.Id = id;
+                this.Role = role;
+            }
+
+            public string Name { get; private set; }
+            public string Id { get; private set; }
+            public string Role { get; private set; }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/071b60ce/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
index 0f9e86c..b263dc8 100644
--- a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
+++ b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
@@ -47,6 +47,7 @@
     <Compile Include="Classic\TestMultiFieldQueryParser.cs" />
     <Compile Include="Classic\TestMultiPhraseQueryParsing.cs" />
     <Compile Include="Classic\TestQueryParser.cs" />
+    <Compile Include="ComplexPhrase\TestComplexPhraseQuery.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
     <Compile Include="Classic\TestMultiAnalyzer.cs" />
     <Compile Include="Util\QueryParserTestBase.cs" />


[06/50] [abbrv] lucenenet git commit: Fixed issue with attributes not being set at the class level in MockCJKSynonymFilter.

Posted by sy...@apache.org.
Fixed issue with attributes not being set at the class level in MockCJKSynonymFilter.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/cfacfbfb
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/cfacfbfb
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/cfacfbfb

Branch: refs/heads/master
Commit: cfacfbfb9f7b650f02fc4e892f62b801c572af29
Parents: 544c6d4
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 19:15:57 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:29:56 2016 +0700

----------------------------------------------------------------------
 .../Classic/TestQueryParser.cs                  | 26 ++++++++++----------
 1 file changed, 13 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/cfacfbfb/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index d0e222d..495391a 100644
--- a/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -401,33 +401,33 @@ namespace Lucene.Net.QueryParser.Classic
         /// <summary>
         /// adds synonym of "\u570b" for "\u56fd".
         /// </summary>
-        protected class MockCJKSynonymFilter : TokenFilter
+        protected internal class MockCJKSynonymFilter : TokenFilter
         {
-            ICharTermAttribute termAtt;
-            IPositionIncrementAttribute posIncAtt;
-            bool addSynonym = false;
+            internal ICharTermAttribute TermAtt;
+            internal IPositionIncrementAttribute PosIncAtt;
+            internal bool AddSynonym = false;
 
             public MockCJKSynonymFilter(TokenStream input)
                 : base(input)
             {
-                ICharTermAttribute termAtt = AddAttribute<ICharTermAttribute>();
-                IPositionIncrementAttribute posIncAtt = AddAttribute<IPositionIncrementAttribute>();
+                TermAtt = AddAttribute<ICharTermAttribute>();
+                PosIncAtt = AddAttribute<IPositionIncrementAttribute>();
             }
 
-            public override sealed bool IncrementToken()
+            public sealed override bool IncrementToken()
             {
-                if (addSynonym)
-                { // inject our synonym
+                if (AddSynonym) // inject our synonym
+                {
                     ClearAttributes();
-                    termAtt.SetEmpty().Append("\u570b");
-                    posIncAtt.PositionIncrement = (0);
-                    addSynonym = false;
+                    TermAtt.SetEmpty().Append("\u570b");
+                    PosIncAtt.PositionIncrement = 0;
+                    AddSynonym = false;
                     return true;
                 }
 
                 if (input.IncrementToken())
                 {
-                    addSynonym = termAtt.toString().equals("\u56fd");
+                    AddSynonym = TermAtt.ToString().Equals("\u56fd");
                     return true;
                 }
                 else


[29/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs b/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
deleted file mode 100644
index 0a9d49f..0000000
--- a/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
+++ /dev/null
@@ -1,728 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Index;
-using Lucene.Net.Search;
-using Lucene.Net.Support;
-using Lucene.Net.Util;
-using Lucene.Net.Util.Automaton;
-using NUnit.Framework;
-using System.Collections.Generic;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Simple
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// Tests for <see cref="SimpleQueryParser"/>
-    /// </summary>
-    [TestFixture]
-    public class TestSimpleQueryParser : LuceneTestCase
-    {
-        /// <summary>
-        /// helper to parse a query with whitespace+lowercase analyzer across "field",
-        /// with default operator of MUST
-        /// </summary>
-        /// <param name="text"></param>
-        /// <returns></returns>
-        private Query Parse(string text)
-        {
-            Analyzer analyzer = new MockAnalyzer(Random());
-            SimpleQueryParser parser = new SimpleQueryParser(analyzer, "field");
-            parser.DefaultOperator = BooleanClause.Occur.MUST;
-            return parser.Parse(text);
-        }
-
-        /// <summary>
-        /// helper to parse a query with whitespace+lowercase analyzer across "field",
-        /// with default operator of MUST
-        /// </summary>
-        /// <param name="text"></param>
-        /// <param name="flags"></param>
-        /// <returns></returns>
-        private Query Parse(string text, int flags)
-        {
-            Analyzer analyzer = new MockAnalyzer(Random());
-            SimpleQueryParser parser = new SimpleQueryParser(analyzer, new HashMap<string, float>() { { "field", 1f } }, flags);
-            parser.DefaultOperator = BooleanClause.Occur.MUST;
-            return parser.Parse(text);
-        }
-
-        /** test a simple term */
-        [Test]
-        public void TestTerm()
-        {
-            Query expected = new TermQuery(new Term("field", "foobar"));
-
-            assertEquals(expected, Parse("foobar"));
-        }
-
-        /** test a fuzzy query */
-        [Test]
-        public void TestFuzzy()
-        {
-            Query regular = new TermQuery(new Term("field", "foobar"));
-            Query expected = new FuzzyQuery(new Term("field", "foobar"), 2);
-
-            assertEquals(expected, Parse("foobar~2"));
-            assertEquals(regular, Parse("foobar~"));
-            assertEquals(regular, Parse("foobar~a"));
-            assertEquals(regular, Parse("foobar~1a"));
-
-            BooleanQuery @bool = new BooleanQuery();
-            FuzzyQuery fuzzy = new FuzzyQuery(new Term("field", "foo"), LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
-            @bool.Add(fuzzy, BooleanClause.Occur.MUST);
-            @bool.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.MUST);
-
-            assertEquals(@bool, Parse("foo~" + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE + 1 + " bar"));
-        }
-
-        /** test a simple phrase */
-        [Test]
-        public void TestPhrase()
-        {
-            PhraseQuery expected = new PhraseQuery();
-            expected.Add(new Term("field", "foo"));
-            expected.Add(new Term("field", "bar"));
-
-            assertEquals(expected, Parse("\"foo bar\""));
-        }
-
-        /** test a simple phrase with various slop settings */
-        [Test]
-        public void TestPhraseWithSlop()
-        {
-            PhraseQuery expectedWithSlop = new PhraseQuery();
-            expectedWithSlop.Add(new Term("field", "foo"));
-            expectedWithSlop.Add(new Term("field", "bar"));
-            expectedWithSlop.Slop = (2);
-
-            assertEquals(expectedWithSlop, Parse("\"foo bar\"~2"));
-
-            PhraseQuery expectedWithMultiDigitSlop = new PhraseQuery();
-            expectedWithMultiDigitSlop.Add(new Term("field", "foo"));
-            expectedWithMultiDigitSlop.Add(new Term("field", "bar"));
-            expectedWithMultiDigitSlop.Slop = (10);
-
-            assertEquals(expectedWithMultiDigitSlop, Parse("\"foo bar\"~10"));
-
-            PhraseQuery expectedNoSlop = new PhraseQuery();
-            expectedNoSlop.Add(new Term("field", "foo"));
-            expectedNoSlop.Add(new Term("field", "bar"));
-
-            assertEquals("Ignore trailing tilde with no slop", expectedNoSlop, Parse("\"foo bar\"~"));
-            assertEquals("Ignore non-numeric trailing slop", expectedNoSlop, Parse("\"foo bar\"~a"));
-            assertEquals("Ignore non-numeric trailing slop", expectedNoSlop, Parse("\"foo bar\"~1a"));
-            assertEquals("Ignore negative trailing slop", expectedNoSlop, Parse("\"foo bar\"~-1"));
-
-            PhraseQuery pq = new PhraseQuery();
-            pq.Add(new Term("field", "foo"));
-            pq.Add(new Term("field", "bar"));
-            pq.Slop = (12);
-
-            BooleanQuery expectedBoolean = new BooleanQuery();
-            expectedBoolean.Add(pq, BooleanClause.Occur.MUST);
-            expectedBoolean.Add(new TermQuery(new Term("field", "baz")), BooleanClause.Occur.MUST);
-
-            assertEquals(expectedBoolean, Parse("\"foo bar\"~12 baz"));
-        }
-
-        /** test a simple prefix */
-        [Test]
-        public void TestPrefix()
-        {
-            PrefixQuery expected = new PrefixQuery(new Term("field", "foobar"));
-
-            assertEquals(expected, Parse("foobar*"));
-        }
-
-        /** test some AND'd terms using '+' operator */
-        [Test]
-        public void TestAND()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST);
-            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.MUST);
-
-            assertEquals(expected, Parse("foo+bar"));
-        }
-
-        /** test some AND'd phrases using '+' operator */
-        [Test]
-        public void TestANDPhrase()
-        {
-            PhraseQuery phrase1 = new PhraseQuery();
-            phrase1.Add(new Term("field", "foo"));
-            phrase1.Add(new Term("field", "bar"));
-            PhraseQuery phrase2 = new PhraseQuery();
-            phrase2.Add(new Term("field", "star"));
-            phrase2.Add(new Term("field", "wars"));
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(phrase1, BooleanClause.Occur.MUST);
-            expected.Add(phrase2, BooleanClause.Occur.MUST);
-
-            assertEquals(expected, Parse("\"foo bar\"+\"star wars\""));
-        }
-
-        /** test some AND'd terms (just using whitespace) */
-        [Test]
-        public void TestANDImplicit()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST);
-            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.MUST);
-
-            assertEquals(expected, Parse("foo bar"));
-        }
-
-        /** test some OR'd terms */
-        [Test]
-        public void TestOR()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("foo|bar"));
-            assertEquals(expected, Parse("foo||bar"));
-        }
-
-        /** test some OR'd terms (just using whitespace) */
-        [Test]
-        public void TestORImplicit()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
-
-            SimpleQueryParser parser = new SimpleQueryParser(new MockAnalyzer(Random()), "field");
-            assertEquals(expected, parser.Parse("foo bar"));
-        }
-
-        /** test some OR'd phrases using '|' operator */
-        [Test]
-        public void TestORPhrase()
-        {
-            PhraseQuery phrase1 = new PhraseQuery();
-            phrase1.Add(new Term("field", "foo"));
-            phrase1.Add(new Term("field", "bar"));
-            PhraseQuery phrase2 = new PhraseQuery();
-            phrase2.Add(new Term("field", "star"));
-            phrase2.Add(new Term("field", "wars"));
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(phrase1, BooleanClause.Occur.SHOULD);
-            expected.Add(phrase2, BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("\"foo bar\"|\"star wars\""));
-        }
-
-        /** test negated term */
-        [Test]
-        public void TestNOT()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST_NOT);
-            expected.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("-foo"));
-            assertEquals(expected, Parse("-(foo)"));
-            assertEquals(expected, Parse("---foo"));
-        }
-
-        /** test crazy prefixes with multiple asterisks */
-        [Test]
-        public void TestCrazyPrefixes1()
-        {
-            Query expected = new PrefixQuery(new Term("field", "st*ar"));
-
-            assertEquals(expected, Parse("st*ar*"));
-        }
-
-        /** test prefixes with some escaping */
-        [Test]
-        public void TestCrazyPrefixes2()
-        {
-            Query expected = new PrefixQuery(new Term("field", "st*ar\\*"));
-
-            assertEquals(expected, Parse("st*ar\\\\**"));
-        }
-
-        /** not a prefix query! the prefix operator is escaped */
-        [Test]
-        public void TestTermInDisguise()
-        {
-            Query expected = new TermQuery(new Term("field", "st*ar\\*"));
-
-            assertEquals(expected, Parse("sT*Ar\\\\\\*"));
-        }
-
-        // a number of test cases here have garbage/errors in
-        // the syntax passed in to test that the query can
-        // still be interpreted as a guess to what the human
-        // input was trying to be
-
-        [Test]
-        public void TestGarbageTerm()
-        {
-            Query expected = new TermQuery(new Term("field", "star"));
-
-            assertEquals(expected, Parse("star"));
-            assertEquals(expected, Parse("star\n"));
-            assertEquals(expected, Parse("star\r"));
-            assertEquals(expected, Parse("star\t"));
-            assertEquals(expected, Parse("star("));
-            assertEquals(expected, Parse("star)"));
-            assertEquals(expected, Parse("star\""));
-            assertEquals(expected, Parse("\t \r\n\nstar   \n \r \t "));
-            assertEquals(expected, Parse("- + \"\" - star \\"));
-        }
-
-        [Test]
-        public void TestGarbageEmpty()
-        {
-            assertNull(Parse(""));
-            assertNull(Parse("  "));
-            assertNull(Parse("  "));
-            assertNull(Parse("\\ "));
-            assertNull(Parse("\\ \\ "));
-            assertNull(Parse("\"\""));
-            assertNull(Parse("\" \""));
-            assertNull(Parse("\" \"|\" \""));
-            assertNull(Parse("(\" \"|\" \")"));
-            assertNull(Parse("\" \" \" \""));
-            assertNull(Parse("(\" \" \" \")"));
-        }
-
-        [Test]
-        public void TestGarbageAND()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
-            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
-
-            assertEquals(expected, Parse("star wars"));
-            assertEquals(expected, Parse("star+wars"));
-            assertEquals(expected, Parse("     star     wars   "));
-            assertEquals(expected, Parse("     star +    wars   "));
-            assertEquals(expected, Parse("  |     star + + |   wars   "));
-            assertEquals(expected, Parse("  |     star + + |   wars   \\"));
-        }
-
-        [Test]
-        public void TestGarbageOR()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("star|wars"));
-            assertEquals(expected, Parse("     star |    wars   "));
-            assertEquals(expected, Parse("  |     star | + |   wars   "));
-            assertEquals(expected, Parse("  +     star | + +   wars   \\"));
-        }
-
-        [Test]
-        public void TestGarbageNOT()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST_NOT);
-            expected.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("-star"));
-            assertEquals(expected, Parse("---star"));
-            assertEquals(expected, Parse("- -star -"));
-        }
-
-        [Test]
-        public void TestGarbagePhrase()
-        {
-            PhraseQuery expected = new PhraseQuery();
-            expected.Add(new Term("field", "star"));
-            expected.Add(new Term("field", "wars"));
-
-            assertEquals(expected, Parse("\"star wars\""));
-            assertEquals(expected, Parse("\"star wars\\ \""));
-            assertEquals(expected, Parse("\"\" | \"star wars\""));
-            assertEquals(expected, Parse("          \"star wars\"        \"\"\\"));
-        }
-
-        [Test]
-        public void TestGarbageSubquery()
-        {
-            Query expected = new TermQuery(new Term("field", "star"));
-
-            assertEquals(expected, Parse("(star)"));
-            assertEquals(expected, Parse("(star))"));
-            assertEquals(expected, Parse("((star)"));
-            assertEquals(expected, Parse("     -()(star)        \n\n\r     "));
-            assertEquals(expected, Parse("| + - ( + - |      star    \n      ) \n"));
-        }
-
-        [Test]
-        public void TestCompoundAnd()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
-            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
-            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.MUST);
-
-            assertEquals(expected, Parse("star wars empire"));
-            assertEquals(expected, Parse("star+wars + empire"));
-            assertEquals(expected, Parse(" | --star wars empire \n\\"));
-        }
-
-        [Test]
-        public void TestCompoundOr()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("star|wars|empire"));
-            assertEquals(expected, Parse("star|wars | empire"));
-            assertEquals(expected, Parse(" | --star|wars|empire \n\\"));
-        }
-
-        [Test]
-        public void TestComplex00()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            BooleanQuery inner = new BooleanQuery();
-            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
-            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner, BooleanClause.Occur.MUST);
-            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.MUST);
-
-            assertEquals(expected, Parse("star|wars empire"));
-            assertEquals(expected, Parse("star|wars + empire"));
-            assertEquals(expected, Parse("star| + wars + ----empire |"));
-        }
-
-        [Test]
-        public void TestComplex01()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            BooleanQuery inner = new BooleanQuery();
-            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
-            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
-            expected.Add(inner, BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("star wars | empire"));
-            assertEquals(expected, Parse("star + wars|empire"));
-            assertEquals(expected, Parse("star + | wars | ----empire +"));
-        }
-
-        [Test]
-        public void TestComplex02()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            BooleanQuery inner = new BooleanQuery();
-            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
-            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
-            expected.Add(inner, BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("star wars | empire | strikes"));
-            assertEquals(expected, Parse("star + wars|empire | strikes"));
-            assertEquals(expected, Parse("star + | wars | ----empire | + --strikes \\"));
-        }
-
-        [Test]
-        public void TestComplex03()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            BooleanQuery inner = new BooleanQuery();
-            BooleanQuery inner2 = new BooleanQuery();
-            inner2.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
-            inner2.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
-            inner.Add(inner2, BooleanClause.Occur.SHOULD);
-            inner.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
-            inner.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner, BooleanClause.Occur.MUST);
-            expected.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
-
-            assertEquals(expected, Parse("star wars | empire | strikes back"));
-            assertEquals(expected, Parse("star + wars|empire | strikes + back"));
-            assertEquals(expected, Parse("star + | wars | ----empire | + --strikes + | --back \\"));
-        }
-
-        [Test]
-        public void TestComplex04()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            BooleanQuery inner = new BooleanQuery();
-            BooleanQuery inner2 = new BooleanQuery();
-            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
-            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
-            inner2.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.MUST);
-            inner2.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
-            expected.Add(inner, BooleanClause.Occur.SHOULD);
-            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
-            expected.Add(inner2, BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("(star wars) | empire | (strikes back)"));
-            assertEquals(expected, Parse("(star + wars) |empire | (strikes + back)"));
-            assertEquals(expected, Parse("(star + | wars |) | ----empire | + --(strikes + | --back) \\"));
-        }
-
-        [Test]
-        public void TestComplex05()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            BooleanQuery inner1 = new BooleanQuery();
-            BooleanQuery inner2 = new BooleanQuery();
-            BooleanQuery inner3 = new BooleanQuery();
-            BooleanQuery inner4 = new BooleanQuery();
-
-            expected.Add(inner1, BooleanClause.Occur.SHOULD);
-            expected.Add(inner2, BooleanClause.Occur.SHOULD);
-
-            inner1.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
-            inner1.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
-
-            inner2.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
-            inner2.Add(inner3, BooleanClause.Occur.SHOULD);
-
-            inner3.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.MUST);
-            inner3.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
-            inner3.Add(inner4, BooleanClause.Occur.MUST);
-
-            inner4.Add(new TermQuery(new Term("field", "jarjar")), BooleanClause.Occur.MUST_NOT);
-            inner4.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
-
-            assertEquals(expected, Parse("(star wars) | (empire | (strikes back -jarjar))"));
-            assertEquals(expected, Parse("(star + wars) |(empire | (strikes + back -jarjar) () )"));
-            assertEquals(expected, Parse("(star + | wars |) | --(--empire | + --(strikes + | --back + -jarjar) \"\" ) \""));
-        }
-
-        [Test]
-        public void TestComplex06()
-        {
-            BooleanQuery expected = new BooleanQuery();
-            BooleanQuery inner1 = new BooleanQuery();
-            BooleanQuery inner2 = new BooleanQuery();
-            BooleanQuery inner3 = new BooleanQuery();
-
-            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
-            expected.Add(inner1, BooleanClause.Occur.MUST);
-
-            inner1.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
-            inner1.Add(inner2, BooleanClause.Occur.SHOULD);
-
-            inner2.Add(inner3, BooleanClause.Occur.MUST);
-            inner3.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
-            inner3.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);
-            inner2.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
-            inner2.Add(new TermQuery(new Term("field", "jar+|jar")), BooleanClause.Occur.MUST);
-
-            assertEquals(expected, Parse("star (wars | (empire | strikes back jar\\+\\|jar))"));
-            assertEquals(expected, Parse("star + (wars |(empire | strikes + back jar\\+\\|jar) () )"));
-            assertEquals(expected, Parse("star + (| wars | | --(--empire | + --strikes + | --back + jar\\+\\|jar) \"\" ) \""));
-        }
-
-        /** test a term with field weights */
-        [Test]
-        public void TestWeightedTerm()
-        {
-            IDictionary<string, float> weights = new Dictionary<string, float>();
-            weights["field0"] = 5f;
-            weights["field1"] = 10f;
-
-            BooleanQuery expected = new BooleanQuery(true);
-            Query field0 = new TermQuery(new Term("field0", "foo"));
-            field0.Boost = (5f);
-            expected.Add(field0, BooleanClause.Occur.SHOULD);
-            Query field1 = new TermQuery(new Term("field1", "foo"));
-            field1.Boost = (10f);
-            expected.Add(field1, BooleanClause.Occur.SHOULD);
-
-            Analyzer analyzer = new MockAnalyzer(Random());
-            SimpleQueryParser parser = new SimpleQueryParser(analyzer, weights);
-            assertEquals(expected, parser.Parse("foo"));
-        }
-
-        /** test a more complex query with field weights */
-        [Test]
-        public void testWeightedOR()
-        {
-            IDictionary<string, float> weights = new Dictionary<string, float>();
-            weights["field0"] = 5f;
-            weights["field1"] = 10f;
-
-            BooleanQuery expected = new BooleanQuery();
-            BooleanQuery foo = new BooleanQuery(true);
-            Query field0 = new TermQuery(new Term("field0", "foo"));
-            field0.Boost = (5f);
-            foo.Add(field0, BooleanClause.Occur.SHOULD);
-            Query field1 = new TermQuery(new Term("field1", "foo"));
-            field1.Boost = (10f);
-            foo.Add(field1, BooleanClause.Occur.SHOULD);
-            expected.Add(foo, BooleanClause.Occur.SHOULD);
-
-            BooleanQuery bar = new BooleanQuery(true);
-            field0 = new TermQuery(new Term("field0", "bar"));
-            field0.Boost = (5f);
-            bar.Add(field0, BooleanClause.Occur.SHOULD);
-            field1 = new TermQuery(new Term("field1", "bar"));
-            field1.Boost = (10f);
-            bar.Add(field1, BooleanClause.Occur.SHOULD);
-            expected.Add(bar, BooleanClause.Occur.SHOULD);
-
-            Analyzer analyzer = new MockAnalyzer(Random());
-            SimpleQueryParser parser = new SimpleQueryParser(analyzer, weights);
-            assertEquals(expected, parser.Parse("foo|bar"));
-        }
-
-        /** helper to parse a query with keyword analyzer across "field" */
-        private Query ParseKeyword(string text, int flags)
-        {
-            Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false);
-            SimpleQueryParser parser = new SimpleQueryParser(analyzer,
-                new HashMap<string, float>() { { "field", 1f } },
-                flags);
-            return parser.Parse(text);
-        }
-
-        /** test the ability to enable/disable phrase operator */
-        [Test]
-        public void TestDisablePhrase()
-        {
-            Query expected = new TermQuery(new Term("field", "\"test\""));
-            assertEquals(expected, ParseKeyword("\"test\"", SimpleQueryParser.PHRASE_OPERATOR));
-        }
-
-        /** test the ability to enable/disable prefix operator */
-        [Test]
-        public void TestDisablePrefix()
-        {
-            Query expected = new TermQuery(new Term("field", "test*"));
-            assertEquals(expected, ParseKeyword("test*", SimpleQueryParser.PREFIX_OPERATOR));
-        }
-
-        /** test the ability to enable/disable AND operator */
-        [Test]
-        public void TestDisableAND()
-        {
-            Query expected = new TermQuery(new Term("field", "foo+bar"));
-            assertEquals(expected, ParseKeyword("foo+bar", SimpleQueryParser.AND_OPERATOR));
-            expected = new TermQuery(new Term("field", "+foo+bar"));
-            assertEquals(expected, ParseKeyword("+foo+bar", SimpleQueryParser.AND_OPERATOR));
-        }
-
-        /** test the ability to enable/disable OR operator */
-        [Test]
-        public void TestDisableOR()
-        {
-            Query expected = new TermQuery(new Term("field", "foo|bar"));
-            assertEquals(expected, ParseKeyword("foo|bar", SimpleQueryParser.OR_OPERATOR));
-            expected = new TermQuery(new Term("field", "|foo|bar"));
-            assertEquals(expected, ParseKeyword("|foo|bar", SimpleQueryParser.OR_OPERATOR));
-        }
-
-        /** test the ability to enable/disable NOT operator */
-        [Test]
-        public void TestDisableNOT()
-        {
-            Query expected = new TermQuery(new Term("field", "-foo"));
-            assertEquals(expected, ParseKeyword("-foo", SimpleQueryParser.NOT_OPERATOR));
-        }
-
-        /** test the ability to enable/disable precedence operators */
-        [Test]
-        public void TestDisablePrecedence()
-        {
-            Query expected = new TermQuery(new Term("field", "(foo)"));
-            assertEquals(expected, ParseKeyword("(foo)", SimpleQueryParser.PRECEDENCE_OPERATORS));
-            expected = new TermQuery(new Term("field", ")foo("));
-            assertEquals(expected, ParseKeyword(")foo(", SimpleQueryParser.PRECEDENCE_OPERATORS));
-        }
-
-        /** test the ability to enable/disable escape operators */
-        [Test]
-        public void TestDisableEscape()
-        {
-            Query expected = new TermQuery(new Term("field", "foo\\bar"));
-            assertEquals(expected, ParseKeyword("foo\\bar", SimpleQueryParser.ESCAPE_OPERATOR));
-            assertEquals(expected, ParseKeyword("(foo\\bar)", SimpleQueryParser.ESCAPE_OPERATOR));
-            assertEquals(expected, ParseKeyword("\"foo\\bar\"", SimpleQueryParser.ESCAPE_OPERATOR));
-        }
-
-        [Test]
-        public void TestDisableWhitespace()
-        {
-            Query expected = new TermQuery(new Term("field", "foo foo"));
-            assertEquals(expected, ParseKeyword("foo foo", SimpleQueryParser.WHITESPACE_OPERATOR));
-            expected = new TermQuery(new Term("field", " foo foo\n "));
-            assertEquals(expected, ParseKeyword(" foo foo\n ", SimpleQueryParser.WHITESPACE_OPERATOR));
-            expected = new TermQuery(new Term("field", "\t\tfoo foo foo"));
-            assertEquals(expected, ParseKeyword("\t\tfoo foo foo", SimpleQueryParser.WHITESPACE_OPERATOR));
-        }
-
-        [Test]
-        public void TestDisableFuzziness()
-        {
-            Query expected = new TermQuery(new Term("field", "foo~1"));
-            assertEquals(expected, ParseKeyword("foo~1", SimpleQueryParser.FUZZY_OPERATOR));
-        }
-
-        [Test]
-        public void TestDisableSlop()
-        {
-            PhraseQuery expectedPhrase = new PhraseQuery();
-            expectedPhrase.Add(new Term("field", "foo"));
-            expectedPhrase.Add(new Term("field", "bar"));
-
-            BooleanQuery expected = new BooleanQuery();
-            expected.Add(expectedPhrase, BooleanClause.Occur.MUST);
-            expected.Add(new TermQuery(new Term("field", "~2")), BooleanClause.Occur.MUST);
-            assertEquals(expected, Parse("\"foo bar\"~2", SimpleQueryParser.NEAR_OPERATOR));
-        }
-
-        // we aren't supposed to barf on any input...
-        [Test]
-        public void TestRandomQueries()
-        {
-            for (int i = 0; i < 1000; i++)
-            {
-                string query = TestUtil.RandomUnicodeString(Random());
-                Parse(query); // no exception
-                ParseKeyword(query, TestUtil.NextInt(Random(), 0, 1024)); // no exception
-            }
-        }
-
-        [Test]
-        public void testRandomQueries2()
-        {
-            char[] chars = new char[] { 'a', '1', '|', '&', ' ', '(', ')', '"', '-', '~' };
-            StringBuilder sb = new StringBuilder();
-            for (int i = 0; i < 1000; i++)
-            {
-                sb.Length = (0);
-                int queryLength = Random().Next(20);
-                for (int j = 0; j < queryLength; j++)
-                {
-                    sb.append(chars[Random().Next(chars.Length)]);
-                }
-                Parse(sb.toString()); // no exception
-                ParseKeyword(sb.toString(), TestUtil.NextInt(Random(), 0, 1024)); // no exception
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
deleted file mode 100644
index 6f7fcfc..0000000
--- a/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
+++ /dev/null
@@ -1,142 +0,0 @@
-\ufeffusing Lucene.Net.Index;
-using Lucene.Net.Search;
-using NUnit.Framework;
-using System;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public class BooleanQueryTst
-    {
-        private string queryText;
-        private readonly int[] expectedDocNrs;
-        private SingleFieldTestDb dBase;
-        private string fieldName;
-        private Assert testCase;
-        private BasicQueryFactory qf;
-        private bool verbose = true;
-
-        public BooleanQueryTst(
-            string queryText,
-            int[] expectedDocNrs,
-            SingleFieldTestDb dBase,
-            string fieldName,
-            Assert testCase,
-            BasicQueryFactory qf)
-        {
-            this.queryText = queryText;
-            this.expectedDocNrs = expectedDocNrs;
-            this.dBase = dBase;
-            this.fieldName = fieldName;
-            this.testCase = testCase;
-            this.qf = qf;
-        }
-
-        public virtual bool Verbose { set { this.verbose = value; } }
-
-        public virtual string QueryText { get { return this.queryText; } }
-
-        public virtual int[] ExpectedDocNrs { get { return this.expectedDocNrs; } }
-
-        internal class TestCollector : Collector
-        { // FIXME: use check hits from Lucene tests
-            private int totalMatched;
-            private bool[] encountered;
-            private Scorer scorer = null;
-            private int docBase = 0;
-            private BooleanQueryTst parent;
-
-            public TestCollector(BooleanQueryTst parent)
-            {
-                totalMatched = 0;
-                encountered = new bool[parent.expectedDocNrs.Length];
-                this.parent = parent;
-            }
-
-            public override Scorer Scorer
-            {
-                set { this.scorer = value; }
-            }
-
-            public override bool AcceptsDocsOutOfOrder()
-            {
-                return true;
-            }
-
-            public override AtomicReaderContext NextReader
-            {
-                set { docBase = value.DocBase; }
-            }
-
-            public override void Collect(int docNr)
-            {
-                float score = scorer.Score();
-                docNr += docBase;
-                /* System.out.println(docNr + " '" + dBase.getDocs()[docNr] + "': " + score); */
-                Assert.True(score > 0.0, parent.QueryText + ": positive score");
-                Assert.True(totalMatched < parent.ExpectedDocNrs.Length, parent.QueryText + ": too many hits");
-                int i;
-                for (i = 0; i < parent.expectedDocNrs.Length; i++)
-                {
-                    if ((!encountered[i]) && (parent.ExpectedDocNrs[i] == docNr))
-                    {
-                        encountered[i] = true;
-                        break;
-                    }
-                }
-                if (i == parent.ExpectedDocNrs.Length)
-                {
-                    Assert.True(false, parent.QueryText + ": doc nr for hit not expected: " + docNr);
-                }
-                totalMatched++;
-            }
-
-            public void CheckNrHits()
-            {
-                Assert.AreEqual(parent.ExpectedDocNrs.Length, totalMatched, parent.QueryText + ": nr of hits");
-            }
-        }
-
-        public void DoTest()
-        {
-
-            if (verbose)
-            {
-                Console.WriteLine("");
-                Console.WriteLine("Query: " + queryText);
-            }
-
-            SrndQuery lq = Parser.QueryParser.Parse(queryText);
-
-            /* if (verbose) System.out.println("Srnd: " + lq.toString()); */
-
-            Search.Query query = lq.MakeLuceneQueryField(fieldName, qf);
-            /* if (verbose) System.out.println("Lucene: " + query.toString()); */
-
-            TestCollector tc = new TestCollector(this);
-            using (IndexReader reader = DirectoryReader.Open(dBase.Db))
-            {
-                IndexSearcher searcher = new IndexSearcher(reader);
-
-                searcher.Search(query, tc);
-            }
-            tc.CheckNrHits();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
deleted file mode 100644
index 7468ef9..0000000
--- a/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
+++ /dev/null
@@ -1,76 +0,0 @@
-\ufeffusing Lucene.Net.QueryParser.Surround.Parser;
-using System;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public class ExceptionQueryTst
-    {
-        private string queryText;
-        private bool verbose;
-
-        public ExceptionQueryTst(string queryText, bool verbose)
-        {
-            this.queryText = queryText;
-            this.verbose = verbose;
-        }
-
-        public void DoTest(StringBuilder failQueries)
-        {
-            bool pass = false;
-            SrndQuery lq = null;
-            try
-            {
-                lq = Parser.QueryParser.Parse(queryText);
-                if (verbose)
-                {
-                    Console.WriteLine("Query: " + queryText + "\nParsed as: " + lq.ToString());
-                }
-            }
-            catch (ParseException e)
-            {
-                if (verbose)
-                {
-                    Console.WriteLine("Parse exception for query:\n"
-                                      + queryText + "\n"
-                                      + e.Message);
-                }
-                pass = true;
-            }
-            if (!pass)
-            {
-                failQueries.append(queryText);
-                failQueries.append("\nParsed as: ");
-                failQueries.append(lq.toString());
-                failQueries.append("\n");
-            }
-        }
-
-        public static string GetFailQueries(string[] exceptionQueries, bool verbose)
-        {
-            StringBuilder failQueries = new StringBuilder();
-            for (int i = 0; i < exceptionQueries.Length; i++)
-            {
-                new ExceptionQueryTst(exceptionQueries[i], verbose).DoTest(failQueries);
-            }
-            return failQueries.toString();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
deleted file mode 100644
index 1221835..0000000
--- a/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
+++ /dev/null
@@ -1,55 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Documents;
-using Lucene.Net.Index;
-using Lucene.Net.Store;
-using Lucene.Net.Util;
-using System;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public class SingleFieldTestDb
-    {
-        private Directory db;
-        private string[] docs;
-        private string fieldName;
-
-        public SingleFieldTestDb(Random random, string[] documents, string fName)
-        {
-            db = new MockDirectoryWrapper(random, new RAMDirectory());
-            docs = documents;
-            fieldName = fName;
-            using (IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(
-                LuceneVersion.LUCENE_CURRENT,
-                new MockAnalyzer(random))))
-            {
-                for (int j = 0; j < docs.Length; j++)
-                {
-                    Document d = new Document();
-                    d.Add(new TextField(fieldName, docs[j], Field.Store.NO));
-                    writer.AddDocument(d);
-                }
-            }
-        }
-
-        public Directory Db { get { return db; } }
-        public string[] Docs { get { return docs; } }
-        public string Fieldname { get { return fieldName; } }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
deleted file mode 100644
index ebe7e2b..0000000
--- a/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
+++ /dev/null
@@ -1,48 +0,0 @@
-\ufeffusing Lucene.Net.Search;
-using Lucene.Net.Util;
-using NUnit.Framework;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class SrndQueryTest : LuceneTestCase
-    {
-        private void CheckEqualParsings(string s1, string s2)
-        {
-            string fieldName = "foo";
-            BasicQueryFactory qf = new BasicQueryFactory(16);
-            Search.Query lq1, lq2;
-            lq1 = Parser.QueryParser.Parse(s1).MakeLuceneQueryField(fieldName, qf);
-            lq2 = Parser.QueryParser.Parse(s2).MakeLuceneQueryField(fieldName, qf);
-            QueryUtils.CheckEqual(lq1, lq2);
-        }
-
-        [Test]
-        public void TestHashEquals()
-        {
-            //grab some sample queries from Test02Boolean and Test03Distance and
-            //check there hashes and equals
-            CheckEqualParsings("word1 w word2", " word1  w  word2 ");
-            CheckEqualParsings("2N(w1,w2,w3)", " 2N(w1, w2 , w3)");
-            CheckEqualParsings("abc?", " abc? ");
-            CheckEqualParsings("w*rd?", " w*rd?");
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
deleted file mode 100644
index 6ebc87a..0000000
--- a/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
+++ /dev/null
@@ -1,72 +0,0 @@
-\ufeffusing Lucene.Net.Util;
-using NUnit.Framework;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class Test01Exceptions_ : LuceneTestCase
-    {
-        /** Main for running test case by itself. */
-        //public static void Main(string[] args)
-        //{
-        //    TestRunner.run(new TestSuite(Test01Exceptions.class));
-        //}
-
-        private bool verbose = false; /* to show actual parsing error messages */
-        private readonly string fieldName = "bi";
-
-        string[] exceptionQueries = {
-            "*",
-            "a*",
-            "ab*",
-            "?",
-            "a?",
-            "ab?",
-            "a???b",
-            "a?",
-            "a*b?",
-            "word1 word2",
-            "word2 AND",
-            "word1 OR",
-            "AND(word2)",
-            "AND(word2,)",
-            "AND(word2,word1,)",
-            "OR(word2)",
-            "OR(word2 ,",
-            "OR(word2 , word1 ,)",
-            "xx NOT",
-            "xx (a AND b)",
-            "(a AND b",
-            "a OR b)",
-            "or(word2+ not ord+, and xyz,def)",
-            ""
-        };
-
-        [Test]
-        public void Test01Exceptions()
-        {
-            string m = ExceptionQueryTst.GetFailQueries(exceptionQueries, verbose);
-            if (m.Length > 0)
-            {
-                fail("No ParseException for:\n" + m);
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
deleted file mode 100644
index aef9279..0000000
--- a/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
+++ /dev/null
@@ -1,178 +0,0 @@
-\ufeffusing Lucene.Net.Util;
-using NUnit.Framework;
-using System;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class Test02Boolean : LuceneTestCase
-    {
-        //public static void Main(string[] args) {
-        //    TestRunner.run(new TestSuite(Test02Boolean.class));
-        //}
-
-        private readonly string fieldName = "bi";
-        private bool verbose = false;
-        private int maxBasicQueries = 16;
-
-        string[] docs1 = {
-            "word1 word2 word3",
-            "word4 word5",
-            "ord1 ord2 ord3",
-            "orda1 orda2 orda3 word2 worda3",
-            "a c e a b c"
-        };
-
-        public override void SetUp()
-        {
-            base.SetUp();
-            db1 = new SingleFieldTestDb(Random(), docs1, fieldName);
-        }
-
-        private SingleFieldTestDb db1;
-
-
-        public void NormalTest1(String query, int[] expdnrs)
-        {
-            BooleanQueryTst bqt = new BooleanQueryTst(query, expdnrs, db1, fieldName, this,
-                                                        new BasicQueryFactory(maxBasicQueries));
-            bqt.Verbose = (verbose);
-            bqt.DoTest();
-        }
-
-        [Test]
-        public void Test02Terms01()
-        {
-            int[] expdnrs = { 0 }; NormalTest1("word1", expdnrs);
-        }
-        [Test]
-        public void Test02Terms02()
-        {
-            int[] expdnrs = { 0, 1, 3 }; NormalTest1("word*", expdnrs);
-        }
-        [Test]
-        public void Test02Terms03()
-        {
-            int[] expdnrs = { 2 }; NormalTest1("ord2", expdnrs);
-        }
-        [Test]
-        public void Test02Terms04()
-        {
-            int[] expdnrs = { }; NormalTest1("kxork*", expdnrs);
-        }
-        [Test]
-        public void Test02Terms05()
-        {
-            int[] expdnrs = { 0, 1, 3 }; NormalTest1("wor*", expdnrs);
-        }
-        [Test]
-        public void Test02Terms06()
-        {
-            int[] expdnrs = { }; NormalTest1("ab", expdnrs);
-        }
-
-        [Test]
-        public void Test02Terms10()
-        {
-            int[] expdnrs = { }; NormalTest1("abc?", expdnrs);
-        }
-        [Test]
-        public void Test02Terms13()
-        {
-            int[] expdnrs = { 0, 1, 3 }; NormalTest1("word?", expdnrs);
-        }
-        [Test]
-        public void Test02Terms14()
-        {
-            int[] expdnrs = { 0, 1, 3 }; NormalTest1("w?rd?", expdnrs);
-        }
-        [Test]
-        public void Test02Terms20()
-        {
-            int[] expdnrs = { 0, 1, 3 }; NormalTest1("w*rd?", expdnrs);
-        }
-        [Test]
-        public void Test02Terms21()
-        {
-            int[] expdnrs = { 3 }; NormalTest1("w*rd??", expdnrs);
-        }
-        [Test]
-        public void Test02Terms22()
-        {
-            int[] expdnrs = { 3 }; NormalTest1("w*?da?", expdnrs);
-        }
-        [Test]
-        public void Test02Terms23()
-        {
-            int[] expdnrs = { }; NormalTest1("w?da?", expdnrs);
-        }
-
-        [Test]
-        public void Test03And01()
-        {
-            int[] expdnrs = { 0 }; NormalTest1("word1 AND word2", expdnrs);
-        }
-        [Test]
-        public void Test03And02()
-        {
-            int[] expdnrs = { 3 }; NormalTest1("word* and ord*", expdnrs);
-        }
-        [Test]
-        public void Test03And03()
-        {
-            int[] expdnrs = { 0 }; NormalTest1("and(word1,word2)", expdnrs);
-        }
-        [Test]
-        public void Test04Or01()
-        {
-            int[] expdnrs = { 0, 3 }; NormalTest1("word1 or word2", expdnrs);
-        }
-        [Test]
-        public void Test04Or02()
-        {
-            int[] expdnrs = { 0, 1, 2, 3 }; NormalTest1("word* OR ord*", expdnrs);
-        }
-        [Test]
-        public void Test04Or03()
-        {
-            int[] expdnrs = { 0, 3 }; NormalTest1("OR (word1, word2)", expdnrs);
-        }
-        [Test]
-        public void Test05Not01()
-        {
-            int[] expdnrs = { 3 }; NormalTest1("word2 NOT word1", expdnrs);
-        }
-        [Test]
-        public void Test05Not02()
-        {
-            int[] expdnrs = { 0 }; NormalTest1("word2* not ord*", expdnrs);
-        }
-        [Test]
-        public void Test06AndOr01()
-        {
-            int[] expdnrs = { 0 }; NormalTest1("(word1 or ab)and or(word2,xyz, defg)", expdnrs);
-        }
-        [Test]
-        public void Test07AndOrNot02()
-        {
-            int[] expdnrs = { 0 }; NormalTest1("or( word2* not ord*, and(xyz,def))", expdnrs);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
deleted file mode 100644
index 6a19cb7..0000000
--- a/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
+++ /dev/null
@@ -1,341 +0,0 @@
-\ufeffusing Lucene.Net.Util;
-using NUnit.Framework;
-using System;
-
-namespace Lucene.Net.QueryParser.Surround.Query
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    [TestFixture]
-    public class Test03Distance : LuceneTestCase
-    {
-        //public static void Main(string[] args) {
-        //    TestRunner.run(new TestSuite(Test03Distance.class));
-        //}
-
-        private bool verbose = false;
-        private int maxBasicQueries = 16;
-
-        private string[] exceptionQueries = {
-            "(aa and bb) w cc",
-            "(aa or bb) w (cc and dd)",
-            "(aa opt bb) w cc",
-            "(aa not bb) w cc",
-            "(aa or bb) w (bi:cc)",
-            "(aa or bb) w bi:cc",
-            "(aa or bi:bb) w cc",
-            "(aa or (bi:bb)) w cc",
-            "(aa or (bb and dd)) w cc"
-        };
-
-        [Test]
-        public void Test00Exceptions()
-        {
-            string m = ExceptionQueryTst.GetFailQueries(exceptionQueries, verbose);
-            if (m.Length > 0)
-            {
-                fail("No ParseException for:\n" + m);
-            }
-        }
-
-        private readonly string fieldName = "bi";
-
-        private string[] docs1 = {
-            "word1 word2 word3",
-            "word4 word5",
-            "ord1 ord2 ord3",
-            "orda1 orda2 orda3 word2 worda3",
-            "a c e a b c"
-        };
-
-        SingleFieldTestDb db1;
-
-        public override void SetUp()
-        {
-            base.SetUp();
-            db1 = new SingleFieldTestDb(Random(), docs1, fieldName);
-            db2 = new SingleFieldTestDb(Random(), docs2, fieldName);
-            db3 = new SingleFieldTestDb(Random(), docs3, fieldName);
-        }
-
-        private void DistanceTst(String query, int[] expdnrs, SingleFieldTestDb db)
-        {
-            BooleanQueryTst bqt = new BooleanQueryTst(query, expdnrs, db, fieldName, this,
-                                                        new BasicQueryFactory(maxBasicQueries));
-            bqt.Verbose = (verbose);
-            bqt.DoTest();
-        }
-
-        public void DistanceTest1(string query, int[] expdnrs)
-        {
-            DistanceTst(query, expdnrs, db1);
-        }
-
-        [Test]
-        public void Test0W01()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word1 w word2", expdnrs);
-        }
-        [Test]
-        public void Test0N01()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word1 n word2", expdnrs);
-        }
-        [Test]
-        public void Test0N01r()
-        { /* r reverse */
-            int[] expdnrs = { 0 }; DistanceTest1("word2 n word1", expdnrs);
-        }
-        [Test]
-        public void Test0W02()
-        {
-            int[] expdnrs = { }; DistanceTest1("word2 w word1", expdnrs);
-        }
-        [Test]
-        public void Test0W03()
-        {
-            int[] expdnrs = { }; DistanceTest1("word2 2W word1", expdnrs);
-        }
-        [Test]
-        public void Test0N03()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word2 2N word1", expdnrs);
-        }
-        [Test]
-        public void Test0N03r()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word1 2N word2", expdnrs);
-        }
-
-        [Test]
-        public void Test0W04()
-        {
-            int[] expdnrs = { }; DistanceTest1("word2 3w word1", expdnrs);
-        }
-
-        [Test]
-        public void Test0N04()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word2 3n word1", expdnrs);
-        }
-        [Test]
-        public void Test0N04r()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word1 3n word2", expdnrs);
-        }
-
-        [Test]
-        public void Test0W05()
-        {
-            int[] expdnrs = { }; DistanceTest1("orda1 w orda3", expdnrs);
-        }
-        [Test]
-        public void Test0W06()
-        {
-            int[] expdnrs = { 3 }; DistanceTest1("orda1 2w orda3", expdnrs);
-        }
-
-        [Test]
-        public void Test1Wtrunc01()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word1* w word2", expdnrs);
-        }
-        [Test]
-        public void Test1Wtrunc02()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word* w word2", expdnrs);
-        }
-        [Test]
-        public void Test1Wtrunc02r()
-        {
-            int[] expdnrs = { 0, 3 }; DistanceTest1("word2 w word*", expdnrs);
-        }
-        [Test]
-        public void Test1Ntrunc02()
-        {
-            int[] expdnrs = { 0, 3 }; DistanceTest1("word* n word2", expdnrs);
-        }
-        [Test]
-        public void Test1Ntrunc02r()
-        {
-            int[] expdnrs = { 0, 3 }; DistanceTest1("word2 n word*", expdnrs);
-        }
-
-        [Test]
-        public void Test1Wtrunc03()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word1* w word2*", expdnrs);
-        }
-        [Test]
-        public void Test1Ntrunc03()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word1* N word2*", expdnrs);
-        }
-
-        [Test]
-        public void Test1Wtrunc04()
-        {
-            int[] expdnrs = { }; DistanceTest1("kxork* w kxor*", expdnrs);
-        }
-        [Test]
-        public void Test1Ntrunc04()
-        {
-            int[] expdnrs = { }; DistanceTest1("kxork* 99n kxor*", expdnrs);
-        }
-
-        [Test]
-        public void Test1Wtrunc05()
-        {
-            int[] expdnrs = { }; DistanceTest1("word2* 2W word1*", expdnrs);
-        }
-        [Test]
-        public void Test1Ntrunc05()
-        {
-            int[] expdnrs = { 0 }; DistanceTest1("word2* 2N word1*", expdnrs);
-        }
-
-        [Test]
-        public void Test1Wtrunc06()
-        {
-            int[] expdnrs = { 3 }; DistanceTest1("ord* W word*", expdnrs);
-        }
-        [Test]
-        public void Test1Ntrunc06()
-        {
-            int[] expdnrs = { 3 }; DistanceTest1("ord* N word*", expdnrs);
-        }
-        [Test]
-        public void Test1Ntrunc06r()
-        {
-            int[] expdnrs = { 3 }; DistanceTest1("word* N ord*", expdnrs);
-        }
-
-        [Test]
-        public void Test1Wtrunc07()
-        {
-            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) W word*", expdnrs);
-        }
-        [Test]
-        public void Test1Wtrunc08()
-        {
-            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) W (word2 OR worda3)", expdnrs);
-        }
-        [Test]
-        public void Test1Wtrunc09()
-        {
-            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) 2W (word2 OR worda3)", expdnrs);
-        }
-        [Test]
-        public void Test1Ntrunc09()
-        {
-            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) 2N (word2 OR worda3)", expdnrs);
-        }
-
-        string[] docs2 = {
-            "w1 w2 w3 w4 w5",
-            "w1 w3 w2 w3",
-            ""
-        };
-
-        SingleFieldTestDb db2;
-
-        public void DistanceTest2(string query, int[] expdnrs)
-        {
-            DistanceTst(query, expdnrs, db2);
-        }
-
-        [Test]
-        public void Test2Wprefix01()
-        {
-            int[] expdnrs = { 0 }; DistanceTest2("W (w1, w2, w3)", expdnrs);
-        }
-        [Test]
-        public void Test2Nprefix01a()
-        {
-            int[] expdnrs = { 0, 1 }; DistanceTest2("N(w1, w2, w3)", expdnrs);
-        }
-        [Test]
-        public void Test2Nprefix01b()
-        {
-            int[] expdnrs = { 0, 1 }; DistanceTest2("N(w3, w1, w2)", expdnrs);
-        }
-
-        [Test]
-        public void Test2Wprefix02()
-        {
-            int[] expdnrs = { 0, 1 }; DistanceTest2("2W(w1,w2,w3)", expdnrs);
-        }
-
-        [Test]
-        public void Test2Nprefix02a()
-        {
-            int[] expdnrs = { 0, 1 }; DistanceTest2("2N(w1,w2,w3)", expdnrs);
-        }
-        [Test]
-        public void Test2Nprefix02b()
-        {
-            int[] expdnrs = { 0, 1 }; DistanceTest2("2N(w2,w3,w1)", expdnrs);
-        }
-
-        [Test]
-        public void Test2Wnested01()
-        {
-            int[] expdnrs = { 0 }; DistanceTest2("w1 W w2 W w3", expdnrs);
-        }
-        [Test]
-        public void Test2Nnested01()
-        {
-            int[] expdnrs = { 0 }; DistanceTest2("w1 N w2 N w3", expdnrs);
-        }
-
-        [Test]
-        public void Test2Wnested02()
-        {
-            int[] expdnrs = { 0, 1 }; DistanceTest2("w1 2W w2 2W w3", expdnrs);
-        }
-        [Test]
-        public void Test2Nnested02()
-        {
-            int[] expdnrs = { 0, 1 }; DistanceTest2("w1 2N w2 2N w3", expdnrs);
-        }
-
-        string[] docs3 = {
-            "low pressure temperature inversion and rain",
-            "when the temperature has a negative height above a depression no precipitation gradient is expected",
-            "when the temperature has a negative height gradient above a depression no precipitation is expected",
-            ""
-        };
-
-        SingleFieldTestDb db3;
-
-        public void DistanceTest3(string query, int[] expdnrs)
-        {
-            DistanceTst(query, expdnrs, db3);
-        }
-
-        [Test]
-        public void Test3Example01()
-        {
-            int[] expdnrs = { 0, 2 }; // query does not match doc 1 because "gradient" is in wrong place there.
-            DistanceTest3("50n((low w pressure*) or depression*,"
-                           + "5n(temperat*, (invers* or (negativ* 3n gradient*))),"
-                           + "rain* or precipitat*)",
-                           expdnrs);
-        }
-    }
-}


[41/50] [abbrv] lucenenet git commit: Fixed ugly string formatting in the ToStringUtils.Boost code that was carried over from Lucene.Net 3.0.3. Added a test to ensure that the functionality is identical.

Posted by sy...@apache.org.
Fixed ugly string formatting in the ToStringUtils.Boost code that was carried over from Lucene.Net 3.0.3. Added a test to ensure that the functionality is identical.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/66ab301f
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/66ab301f
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/66ab301f

Branch: refs/heads/master
Commit: 66ab301f2916759160da9faf9eca0f5576b2968e
Parents: 5706931
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Aug 7 18:02:47 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:31:14 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.Core/Util/ToStringUtils.cs       |  6 +--
 src/Lucene.Net.Tests/Lucene.Net.Tests.csproj    |  1 +
 .../core/Util/TestToStringUtils.cs              | 48 ++++++++++++++++++++
 3 files changed, 51 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66ab301f/src/Lucene.Net.Core/Util/ToStringUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/ToStringUtils.cs b/src/Lucene.Net.Core/Util/ToStringUtils.cs
index e5f5a0d..3e1f938 100644
--- a/src/Lucene.Net.Core/Util/ToStringUtils.cs
+++ b/src/Lucene.Net.Core/Util/ToStringUtils.cs
@@ -37,10 +37,8 @@ namespace Lucene.Net.Util
         {
             if (boost != 1.0f)
             {
-                float boostAsLong = (long)boost;
-                if (boostAsLong == boost)
-                    return "^" + boost.ToString(".0").Replace(CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator, ".");
-                return "^" + boost.ToString().Replace(CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator, ".");
+                // .NET compatibility fix
+                return "^" + boost.ToString(".0######", CultureInfo.InvariantCulture);
             }
             else
                 return "";

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66ab301f/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj b/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
index 5c148a2..4755965 100644
--- a/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
+++ b/src/Lucene.Net.Tests/Lucene.Net.Tests.csproj
@@ -589,6 +589,7 @@
     <Compile Include="core\Util\TestTimSorter.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="core\Util\TestToStringUtils.cs" />
     <Compile Include="core\Util\TestUnicodeUtil.cs">
       <SubType>Code</SubType>
     </Compile>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/66ab301f/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs b/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
new file mode 100644
index 0000000..a48cd23
--- /dev/null
+++ b/src/Lucene.Net.Tests/core/Util/TestToStringUtils.cs
@@ -0,0 +1,48 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+using System.Globalization;
+using System.Threading;
+
+namespace Lucene.Net.Core.Util
+{
+    /// <summary>
+    /// This test was added for .NET compatibility
+    /// </summary>
+    public class TestToStringUtils : LuceneTestCase
+    {
+        CultureInfo originalCulture;
+        public override void SetUp()
+        {
+            base.SetUp();
+            originalCulture = Thread.CurrentThread.CurrentCulture;
+        }
+
+        public override void TearDown()
+        {
+            Thread.CurrentThread.CurrentCulture = originalCulture;
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Check to ensure that the Boost function is properly converted in every possible culture.
+        /// </summary>
+        [Test]
+        public void TestBoost()
+        {
+            float boostNormal = 1f;
+            float boostFractional = 2.5f;
+            float boostNonFractional = 5f;
+            float boostLong = 1.111111111f;
+
+            foreach (CultureInfo culture in CultureInfo.GetCultures(CultureTypes.SpecificCultures | CultureTypes.NeutralCultures))
+            {
+                Thread.CurrentThread.CurrentCulture = culture;
+
+                assertEquals("", ToStringUtils.Boost(boostNormal));
+                assertEquals("^2.5", ToStringUtils.Boost(boostFractional));
+                assertEquals("^5.0", ToStringUtils.Boost(boostNonFractional));
+                assertEquals("^1.111111", ToStringUtils.Boost(boostLong));
+            }
+        }
+    }
+}


[25/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
new file mode 100644
index 0000000..aac1505
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
@@ -0,0 +1,1356 @@
+using System;
+using System.Diagnostics.CodeAnalysis;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+	/// <summary>Token Manager. </summary>
+	public class QueryParserTokenManager //: QueryParserConstants
+	{
+		private void  InitBlock()
+		{
+			StreamWriter temp_writer;
+			temp_writer = new StreamWriter(Console.OpenStandardOutput(), Console.Out.Encoding);
+			temp_writer.AutoFlush = true;
+			debugStream = temp_writer;
+		}
+		
+		/// <summary>Debug output. </summary>
+		public StreamWriter debugStream;
+		/// <summary>Set debug output. </summary>
+		public virtual void  SetDebugStream(StreamWriter ds)
+		{
+			debugStream = ds;
+		}
+		private int JjStopStringLiteralDfa_2(int pos, long active0)
+		{
+			switch (pos)
+			{
+				
+				default: 
+					return - 1;
+				
+			}
+		}
+		private int JjStartNfa_2(int pos, long active0)
+		{
+			return JjMoveNfa_2(JjStopStringLiteralDfa_2(pos, active0), pos + 1);
+		}
+		private int JjStopAtPos(int pos, int kind)
+		{
+			jjmatchedKind = kind;
+			jjmatchedPos = pos;
+			return pos + 1;
+		}
+		private int JjMoveStringLiteralDfa0_2()
+		{
+			switch (curChar)
+			{
+				
+				case (char) (40): 
+					return JjStopAtPos(0, 14);
+				
+				case (char) (41): 
+					return JjStopAtPos(0, 15);
+				
+				case (char) (42): 
+					return JjStartNfaWithStates_2(0, 17, 49);
+				
+				case (char) (43):
+                    return JjStartNfaWithStates_2(0, 11, 15);
+				
+				case (char) (45):
+                    return JjStartNfaWithStates_2(0, 12, 15);
+				
+				case (char) (58): 
+					return JjStopAtPos(0, 16);
+				
+				case (char) (91): 
+					return JjStopAtPos(0, 25);
+				
+				case (char) (94): 
+					return JjStopAtPos(0, 18);
+				
+				case (char) (123): 
+					return JjStopAtPos(0, 26);
+				
+				default:
+                    return JjMoveNfa_2(0, 0);
+				
+			}
+		}
+		private int JjStartNfaWithStates_2(int pos, int kind, int state)
+		{
+			jjmatchedKind = kind;
+			jjmatchedPos = pos;
+			try
+			{
+				curChar = input_stream.ReadChar();
+			}
+			catch (IOException)
+			{
+				return pos + 1;
+			}
+            return JjMoveNfa_2(state, pos + 1);
+		}
+		internal static readonly ulong[] jjbitVec0 = new ulong[]{0x1L, 0x0L, 0x0L, 0x0L};
+		internal static readonly ulong[] jjbitVec1 = new ulong[]{0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL};
+		internal static readonly ulong[] jjbitVec3 = new ulong[]{0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL};
+		internal static readonly ulong[] jjbitVec4 = new ulong[]{0xfffefffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL};
+        private int JjMoveNfa_2(int startState, int curPos)
+		{
+			int startsAt = 0;
+            jjnewStateCnt = 49;
+			int i = 1;
+			jjstateSet[0] = startState;
+			int kind = 0x7fffffff;
+			for (; ; )
+			{
+				if (++jjround == 0x7fffffff)
+					ReInitRounds();
+				if (curChar < 64)
+				{
+					ulong l = (ulong) (1L << (int) curChar);
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							case 49: 
+							case 33:
+                                if ((0xfbff7cf8ffffd9ffL & l) == (ulong)0L)
+									break;
+								if (kind > 23)
+									kind = 23;
+								JjCheckNAddTwoStates(33, 34);
+								break;
+							
+							case 0:
+                                if ((0xfbff54f8ffffd9ffL & l) != (ulong)0L)
+								{
+									if (kind > 23)
+										kind = 23;
+									JjCheckNAddTwoStates(33, 34);
+								}
+								else if ((0x100002600L & l) != 0L)
+								{
+									if (kind > 7)
+										kind = 7;
+								}
+                                else if ((0x280200000000L & l) != 0L)
+                                    jjstateSet[jjnewStateCnt++] = 15;
+								else if (curChar == 47)
+									JjCheckNAddStates(0, 2);
+								else if (curChar == 34)
+                                    JjCheckNAddStates(3, 5);
+                                if ((0x7bff50f8ffffd9ffL & l) != 0L)
+								{
+									if (kind > 20)
+										kind = 20;
+                                    JjCheckNAddStates(6, 10);
+								}
+                                else if (curChar == 42)
+                                {
+                                    if (kind > 22)
+                                        kind = 22;
+                                }
+                                else if (curChar == 33)
+                                {
+                                    if (kind > 10)
+                                        kind = 10;
+                                }
+								if (curChar == 38)
+									jjstateSet[jjnewStateCnt++] = 4;
+								break;
+							
+							case 4: 
+								if (curChar == 38 && kind > 8)
+									kind = 8;
+								break;
+							
+							case 5: 
+								if (curChar == 38)
+									jjstateSet[jjnewStateCnt++] = 4;
+								break;
+							
+							case 13: 
+								if (curChar == 33 && kind > 10)
+									kind = 10;
+								break;
+							
+							case 14:
+                                if ((0x280200000000L & l) != 0L)
+                                    jjstateSet[jjnewStateCnt++] = 15;
+                                break;
+                            case 15:
+                                if ((0x100002600L & l) != 0L && kind > 13)
+                                    kind = 13;
+                                break;
+                            case 16:
+								if (curChar == 34)
+									JjCheckNAddStates(3, 5);
+								break;
+							case 17: 
+								if ((0xfffffffbffffffffL & l) != (ulong) 0L)
+									JjCheckNAddStates(3, 5);
+								break;
+							
+							case 19: 
+								JjCheckNAddStates(3, 5);
+								break;
+							
+							case 20: 
+								if (curChar == 34 && kind > 19)
+									kind = 19;
+								break;
+							
+							case 22: 
+								if ((0x3ff000000000000L & l) == 0L)
+									break;
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddStates(11, 14);
+								break;
+							
+							case 23: 
+								if (curChar == 46)
+									JjCheckNAdd(24);
+								break;
+							
+							case 24: 
+								if ((0x3ff000000000000L & l) == 0L)
+									break;
+								if (kind > 21)
+									kind = 21;
+                                JjCheckNAddStates(15, 17);
+								break;
+							
+							case 25:
+                                if ((0x7bff78f8ffffd9ffL & l) == (ulong)0L)
+									break;
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(25, 26);
+								break;
+							
+							case 27: 
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(25, 26);
+								break;
+							
+							case 28:
+                                if ((0x7bff78f8ffffd9ffL & l) == 0L)
+									break;
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(28, 29);
+								break;
+							
+							case 30: 
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(28, 29);
+								break;
+							
+							case 31:
+                                if (curChar == 42 && kind > 22)
+                                    kind = 22;
+								break;
+							
+							case 32:
+                                if ((0xfbff54f8ffffd9ffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 23)
+                                    kind = 23;
+                                JjCheckNAddTwoStates(33, 34);
+                                break;
+                            case 35:
+                                if (kind > 23)
+                                    kind = 23;
+                                JjCheckNAddTwoStates(33, 34);
+                                break;
+                            case 36:
+                            case 38:
+                                if (curChar == 47)
+                                    JjCheckNAddStates(0, 2);
+                                break;
+                            case 37:
+                                if ((0xffff7fffffffffffL & l) != (ulong)0L)
+                                    JjCheckNAddStates(0, 2);
+                                break;
+                            case 40:
+                                if (curChar == 47 && kind > 24)
+                                    kind = 24;
+                                break;
+                            case 41:
+                                if ((0x7bff50f8ffffd9ffL & l) == 0L)
+                                    break;
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddStates(6, 10);
+                                break;
+                            case 42:
+                                if ((0x7bff78f8ffffd9ffL & l) == 0L)
+                                    break;
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddTwoStates(42, 43);
+                                break;
+                            case 44:
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddTwoStates(42, 43);
+                                break;
+                            case 45:
+                                if ((0x7bff78f8ffffd9ffL & l) != 0L)
+                                    JjCheckNAddStates(18, 20);
+                                break;
+                            case 47:
+                                JjCheckNAddStates(18, 20);
+                                break;
+							
+							default:  break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				else if (curChar < 128)
+				{
+                    // NOTE: This didn't change in Java from 3.0.1 to 4.8.0, but it is different in .NET.
+                    // But changing it back made more tests pass, so I am working under the assumption 63
+                    // is the correct value.
+                    //ulong l = (ulong)(1L << (curChar & 077));
+                    ulong l = (ulong) (1L << (curChar & 63)); 
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							case 49: 
+								if ((0x97ffffff87ffffffL & l) != (ulong) 0L)
+								{
+									if (kind > 23)
+										kind = 23;
+									JjCheckNAddTwoStates(33, 34);
+								}
+								else if (curChar == 92)
+									JjCheckNAddTwoStates(35, 35);
+								break;
+							
+							case 0: 
+								if ((0x97ffffff87ffffffL & l) != (ulong) 0L)
+								{
+									if (kind > 20)
+										kind = 20;
+									JjCheckNAddStates(6, 10);
+								}
+								else if (curChar == 92)
+									JjCheckNAddStates(21, 23);
+								else if (curChar == 126)
+								{
+									if (kind > 21)
+										kind = 21;
+                                    JjCheckNAddStates(24, 26);
+								}
+								if ((0x97ffffff87ffffffL & l) != (ulong) 0L)
+								{
+									if (kind > 23)
+										kind = 23;
+									JjCheckNAddTwoStates(33, 34);
+								}
+								if (curChar == 78)
+									jjstateSet[jjnewStateCnt++] = 11;
+								else if (curChar == 124)
+									jjstateSet[jjnewStateCnt++] = 8;
+								else if (curChar == 79)
+									jjstateSet[jjnewStateCnt++] = 6;
+								else if (curChar == 65)
+									jjstateSet[jjnewStateCnt++] = 2;
+								break;
+							
+							case 1: 
+								if (curChar == 68 && kind > 8)
+									kind = 8;
+								break;
+							
+							case 2: 
+								if (curChar == 78)
+									jjstateSet[jjnewStateCnt++] = 1;
+								break;
+							
+							case 3: 
+								if (curChar == 65)
+									jjstateSet[jjnewStateCnt++] = 2;
+								break;
+							
+							case 6: 
+								if (curChar == 82 && kind > 9)
+									kind = 9;
+								break;
+							
+							case 7: 
+								if (curChar == 79)
+									jjstateSet[jjnewStateCnt++] = 6;
+								break;
+							
+							case 8: 
+								if (curChar == 124 && kind > 9)
+									kind = 9;
+								break;
+							
+							case 9: 
+								if (curChar == 124)
+									jjstateSet[jjnewStateCnt++] = 8;
+								break;
+							
+							case 10: 
+								if (curChar == 84 && kind > 10)
+									kind = 10;
+								break;
+							
+							case 11: 
+								if (curChar == 79)
+									jjstateSet[jjnewStateCnt++] = 10;
+								break;
+							
+							case 12: 
+								if (curChar == 78)
+									jjstateSet[jjnewStateCnt++] = 11;
+								break;
+							
+							case 17: 
+								if ((0xffffffffefffffffL & l) != (ulong) 0L)
+									JjCheckNAddStates(3, 5);
+								break;
+							
+							case 18: 
+								if (curChar == 92)
+									jjstateSet[jjnewStateCnt++] = 19;
+								break;
+							
+							case 19: 
+								JjCheckNAddStates(3, 5);
+								break;
+							
+							case 21: 
+								if (curChar != 126)
+									break;
+								if (kind > 21)
+									kind = 21;
+                                JjCheckNAddStates(24, 26);
+								break;
+							
+							case 25: 
+								if ((0x97ffffff87ffffffL & l) == (ulong) 0L)
+									break;
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(25, 26);
+								break;
+							
+							case 26: 
+								if (curChar == 92)
+									JjAddStates(27, 28);
+								break;
+							
+							case 27: 
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(25, 26);
+								break;
+							
+							case 28: 
+								if ((0x97ffffff87ffffffL & l) == (ulong) 0L)
+									break;
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(28, 29);
+								break;
+							
+							case 29:
+                                if (curChar == 92)
+                                    JjAddStates(29, 30);
+                                break;
+                            case 30:
+                                if (kind > 21)
+                                    kind = 21;
+                                JjCheckNAddTwoStates(28, 29);
+                                break;
+                            case 32:
+                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 23)
+                                    kind = 23;
+                                JjCheckNAddTwoStates(33, 34);
+                                break;
+                            case 33:
+                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 23)
+                                    kind = 23;
+                                JjCheckNAddTwoStates(33, 34);
+                                break;
+                            case 34:
+                                if (curChar == 92)
+                                    JjCheckNAddTwoStates(35, 35);
+                                break;
+                            case 35:
+                                if (kind > 23)
+                                    kind = 23;
+                                JjCheckNAddTwoStates(33, 34);
+                                break;
+                            case 37:
+                                JjAddStates(0, 2);
+                                break;
+                            case 39:
+                                if (curChar == 92)
+                                    jjstateSet[jjnewStateCnt++] = 38;
+                                break;
+                            case 41:
+                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddStates(6, 10);
+                                break;
+                            case 42:
+                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
+                                    break;
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddTwoStates(42, 43);
+                                break;
+                            case 43:
+                                if (curChar == 92)
+                                    JjCheckNAddTwoStates(44, 44);
+                                break;
+                            case 44:
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddTwoStates(42, 43);
+                                break;
+                            case 45:
+                                if ((0x97ffffff87ffffffL & l) != (ulong)0L)
+                                    JjCheckNAddStates(18, 20);
+                                break;
+                            case 46:
+                                if (curChar == 92)
+                                    JjCheckNAddTwoStates(47, 47);
+                                break;
+                            case 47:
+                                JjCheckNAddStates(18, 20);
+                                break;
+                            case 48:
+                                if (curChar == 92)
+                                    JjCheckNAddStates(21, 23);
+                                break;
+
+                            default: break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				else
+				{
+					int hiByte = (int) (curChar >> 8);
+					int i1 = hiByte >> 6;
+					ulong l1 = (ulong) (1L << (hiByte & 63));
+					int i2 = (curChar & 0xff) >> 6;
+					ulong l2 = (ulong) (1L << (curChar & 63));
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							case 49: 
+							case 33: 
+								if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
+									break;
+								if (kind > 23)
+									kind = 23;
+								JjCheckNAddTwoStates(33, 34);
+								break;
+							
+							case 0: 
+								if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+								{
+									if (kind > 7)
+										kind = 7;
+								}
+								if (JjCanMove_2(hiByte, i1, i2, l1, l2))
+								{
+									if (kind > 23)
+										kind = 23;
+									JjCheckNAddTwoStates(33, 34);
+								}
+								if (JjCanMove_2(hiByte, i1, i2, l1, l2))
+								{
+									if (kind > 20)
+										kind = 20;
+									JjCheckNAddStates(6, 10);
+								}
+								break;
+							
+							case 15: 
+                                if (JjCanMove_0(hiByte, i1, i2, l1, l2) && kind > 13)
+                                    kind = 13;
+                                break;
+							case 17: 
+                            case 19:
+								if (JjCanMove_1(hiByte, i1, i2, l1, l2))
+									JjCheckNAddStates(3, 5);
+								break;
+							
+							case 25: 
+								if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
+									break;
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(25, 26);
+								break;
+							
+							case 27: 
+								if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
+									break;
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(25, 26);
+								break;
+							
+							case 28: 
+								if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
+									break;
+								if (kind > 21)
+									kind = 21;
+								JjCheckNAddTwoStates(28, 29);
+								break;
+                            case 30:
+                                if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
+                                    break;
+                                if (kind > 21)
+                                    kind = 21;
+                                JjCheckNAddTwoStates(28, 29);
+                                break;
+							case 32: 
+								if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
+									break;
+								if (kind > 23)
+									kind = 23;
+								JjCheckNAddTwoStates(33, 34);
+								break;
+							
+							case 35: 
+								if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
+									break;
+								if (kind > 23)
+									kind = 23;
+								JjCheckNAddTwoStates(33, 34);
+								break;
+
+                            case 37:
+                                if (JjCanMove_1(hiByte, i1, i2, l1, l2))
+                                    JjAddStates(0, 2);
+                                break;
+                            case 41:
+                                if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
+                                    break;
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddStates(6, 10);
+                                break;
+                            case 42:
+                                if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
+                                    break;
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddTwoStates(42, 43);
+                                break;
+                            case 44:
+                                if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
+                                    break;
+                                if (kind > 20)
+                                    kind = 20;
+                                JjCheckNAddTwoStates(42, 43);
+                                break;
+                            case 45:
+								if (JjCanMove_2(hiByte, i1, i2, l1, l2))
+									JjCheckNAddStates(18, 20);
+								break;
+							
+							case 47: 
+								if (JjCanMove_1(hiByte, i1, i2, l1, l2))
+									JjCheckNAddStates(18, 20);
+								break;
+							
+							default:  break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				if (kind != 0x7fffffff)
+				{
+					jjmatchedKind = kind;
+					jjmatchedPos = curPos;
+					kind = 0x7fffffff;
+				}
+				++curPos;
+				if ((i = jjnewStateCnt) == (startsAt = 49 - (jjnewStateCnt = startsAt)))
+					return curPos;
+				try
+				{
+					curChar = input_stream.ReadChar();
+				}
+				catch (System.IO.IOException)
+				{
+					return curPos;
+				}
+			}
+		}
+		private int JjMoveStringLiteralDfa0_0()
+		{
+			return JjMoveNfa_0(0, 0);
+		}
+		private int JjMoveNfa_0(int startState, int curPos)
+		{
+			int startsAt = 0;
+			jjnewStateCnt = 3;
+			int i = 1;
+			jjstateSet[0] = startState;
+			int kind = 0x7fffffff;
+			for (; ; )
+			{
+				if (++jjround == 0x7fffffff)
+					ReInitRounds();
+				if (curChar < 64)
+				{
+					ulong l = (ulong) (1L << (int) curChar);
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							case 0: 
+								if ((0x3ff000000000000L & l) == 0L)
+									break;
+								if (kind > 27)
+									kind = 27;
+								JjAddStates(31, 32);
+								break;
+							
+							case 1: 
+								if (curChar == 46)
+									JjCheckNAdd(2);
+								break;
+							
+							case 2: 
+								if ((0x3ff000000000000L & l) == 0L)
+									break;
+								if (kind > 27)
+									kind = 27;
+								JjCheckNAdd(2);
+								break;
+							
+							default:  break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				else if (curChar < 128)
+				{
+					ulong l = (ulong) (1L << (curChar & 63));
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							default:  break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				else
+				{
+					int hiByte = (int) (curChar >> 8);
+					int i1 = hiByte >> 6;
+					long l1 = 1L << (hiByte & 63);
+					int i2 = (curChar & 0xff) >> 6;
+					long l2 = 1L << (curChar & 63);
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							default:  break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				if (kind != 0x7fffffff)
+				{
+					jjmatchedKind = kind;
+					jjmatchedPos = curPos;
+					kind = 0x7fffffff;
+				}
+				++curPos;
+				if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
+					return curPos;
+				try
+				{
+					curChar = input_stream.ReadChar();
+				}
+				catch (System.IO.IOException)
+				{
+					return curPos;
+				}
+			}
+		}
+		private int JjStopStringLiteralDfa_1(int pos, long active0)
+		{
+			switch (pos)
+			{
+				
+				case 0:
+                    if ((active0 & 0x10000000L) != 0L)
+					{
+						jjmatchedKind = 32;
+						return 6;
+					}
+					return - 1;
+				
+				default: 
+					return - 1;
+				
+			}
+		}
+		private int JjStartNfa_1(int pos, long active0)
+		{
+			return JjMoveNfa_1(JjStopStringLiteralDfa_1(pos, active0), pos + 1);
+		}
+		private int JjMoveStringLiteralDfa0_1()
+		{
+			switch (curChar)
+			{
+				
+				case (char)84:
+                    return JjMoveStringLiteralDfa1_1(0x10000000L);
+				
+				case (char)93: 
+					return JjStopAtPos(0, 29);
+
+                case (char)125:
+                    return JjStopAtPos(0, 30);
+
+				default: 
+					return JjMoveNfa_1(0, 0);
+				
+			}
+		}
+		private int JjMoveStringLiteralDfa1_1(long active0)
+		{
+			try
+			{
+				curChar = input_stream.ReadChar();
+			}
+			catch (System.IO.IOException)
+			{
+				JjStopStringLiteralDfa_1(0, active0);
+				return 1;
+			}
+			switch (curChar)
+			{
+				
+				case (char) (79):
+                    if ((active0 & 0x10000000L) != 0L)
+						return JjStartNfaWithStates_1(1, 28, 6);
+					break;
+				
+				default: 
+					break;
+				
+			}
+			return JjStartNfa_1(0, active0);
+		}
+		private int JjStartNfaWithStates_1(int pos, int kind, int state)
+		{
+			jjmatchedKind = kind;
+			jjmatchedPos = pos;
+			try
+			{
+				curChar = input_stream.ReadChar();
+			}
+			catch (System.IO.IOException)
+			{
+				return pos + 1;
+			}
+			return JjMoveNfa_1(state, pos + 1);
+		}
+		private int JjMoveNfa_1(int startState, int curPos)
+		{
+			int startsAt = 0;
+			jjnewStateCnt = 7;
+			int i = 1;
+			jjstateSet[0] = startState;
+			int kind = 0x7fffffff;
+			for (; ; )
+			{
+				if (++jjround == 0x7fffffff)
+					ReInitRounds();
+				if (curChar < 64)
+				{
+					ulong l = (ulong) (1L << (int) curChar);
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							case 0:
+                                if ((0xfffffffeffffffffL & l) != (ulong)0L)
+								{
+									if (kind > 32)
+										kind = 32;
+									JjCheckNAdd(6);
+								}
+								if ((0x100002600L & l) != 0L)
+								{
+									if (kind > 7)
+										kind = 7;
+								}
+								else if (curChar == 34)
+									JjCheckNAddTwoStates(2, 4);
+								break;
+							
+							case 1: 
+								if (curChar == 34)
+									JjCheckNAddTwoStates(2, 4);
+								break;
+							
+							case 2:
+                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
+									JjCheckNAddStates(33, 35);
+								break;
+							
+							case 3: 
+								if (curChar == 34)
+									JjCheckNAddStates(33, 35);
+								break;
+							
+							case 5: 
+								if (curChar == 34 && kind > 31)
+									kind = 31;
+								break;
+							
+							case 6:
+                                if ((0xfffffffeffffffffL & l) == (ulong)0L)
+									break;
+								if (kind > 32)
+									kind = 32;
+								JjCheckNAdd(6);
+								break;
+							
+							default:  break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				else if (curChar < 128)
+				{
+					ulong l = (ulong) (1L << (curChar & 63));
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							case 0: 
+							case 6:
+                                if ((0xdfffffffdfffffffL & l) == (ulong)0L)
+									break;
+								if (kind > 32)
+									kind = 32;
+								JjCheckNAdd(6);
+								break;
+							
+							case 2: 
+								JjAddStates(33, 35);
+								break;
+							
+							case 4: 
+								if (curChar == 92)
+									jjstateSet[jjnewStateCnt++] = 3;
+								break;
+							
+							default:  break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				else
+				{
+					int hiByte = (int) (curChar >> 8);
+					int i1 = hiByte >> 6;
+					ulong l1 = (ulong) (1L << (hiByte & 63));
+					int i2 = (curChar & 0xff) >> 6;
+					ulong l2 = (ulong) (1L << (curChar & 63));
+					do 
+					{
+						switch (jjstateSet[--i])
+						{
+							
+							case 0: 
+								if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+								{
+									if (kind > 7)
+										kind = 7;
+								}
+								if (JjCanMove_1(hiByte, i1, i2, l1, l2))
+								{
+									if (kind > 32)
+										kind = 32;
+									JjCheckNAdd(6);
+								}
+								break;
+							
+							case 2: 
+								if (JjCanMove_1(hiByte, i1, i2, l1, l2))
+									JjAddStates(33, 35);
+								break;
+							
+							case 6: 
+								if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
+									break;
+								if (kind > 32)
+									kind = 32;
+								JjCheckNAdd(6);
+								break;
+							
+							default:  break;
+							
+						}
+					}
+					while (i != startsAt);
+				}
+				if (kind != 0x7fffffff)
+				{
+					jjmatchedKind = kind;
+					jjmatchedPos = curPos;
+					kind = 0x7fffffff;
+				}
+				++curPos;
+				if ((i = jjnewStateCnt) == (startsAt = 7 - (jjnewStateCnt = startsAt)))
+					return curPos;
+				try
+				{
+					curChar = input_stream.ReadChar();
+				}
+				catch (System.IO.IOException)
+				{
+					return curPos;
+				}
+			}
+		}
+        internal static readonly int[] jjnextStates = new int[]{
+           37, 39, 40, 17, 18, 20, 42, 45, 31, 46, 43, 22, 23, 25, 26, 24, 
+           25, 26, 45, 31, 46, 44, 47, 35, 22, 28, 29, 27, 27, 30, 30, 0, 
+           1, 2, 4, 5
+        };
+		private static bool JjCanMove_0(int hiByte, int i1, int i2, ulong l1, ulong l2)
+		{
+			switch (hiByte)
+			{
+				
+				case 48: 
+					return ((jjbitVec0[i2] & l2) != (ulong) 0L);
+				
+				default: 
+					return false;
+				
+			}
+		}
+		private static bool JjCanMove_1(int hiByte, int i1, int i2, ulong l1, ulong l2)
+		{
+			switch (hiByte)
+			{
+				
+				case 0: 
+					return ((jjbitVec3[i2] & l2) != (ulong) 0L);
+				
+				default: 
+					if ((jjbitVec1[i1] & l1) != (ulong) 0L)
+						return true;
+					return false;
+				
+			}
+		}
+		private static bool JjCanMove_2(int hiByte, int i1, int i2, ulong l1, ulong l2)
+		{
+			switch (hiByte)
+			{
+				
+				case 0: 
+					return ((jjbitVec3[i2] & l2) != (ulong) 0L);
+				
+				case 48: 
+					return ((jjbitVec1[i2] & l2) != (ulong) 0L);
+				
+				default: 
+					if ((jjbitVec4[i1] & l1) != (ulong) 0L)
+						return true;
+					return false;
+				
+			}
+		}
+
+        ///// <summary>Token literal values. </summary>
+        //public static readonly string[] jjstrLiteralImages = new string[] { 
+        //    "", null, null, null, null, null, null, null, null, null, null, "\x002B", "\x002D", 
+        //    "\x0028", "\x0029", "\x003A", "\x002A", "\x005E", null, null, null, null, null, "\x005B", "\x007B", 
+        //    null, "\x0054\x004F", "\x005D", null, null, "\x0054\x004F", "\x007D", null, null };
+		
+
+		/// <summary>Token literal values. </summary>
+		public static readonly string[] jjstrLiteralImages = new string[]{
+            "", null, null, null, null, null, null, null, null, null, null, "\x002B", "\x002D", 
+            null, "\x0028", "\x0029", "\x003A", "\x002A", "\x005E", null, null, null, null, null, null, 
+            "\x005B", "\x007B", null, "\x0054\x004F", "\x005D", "\x007D", null, null };
+		
+		/// <summary>Lexer state names. </summary>
+		public static readonly string[] lexStateNames = new string[] {
+            "Boost", 
+            "Range", 
+            "DEFAULT"
+        };
+		
+		/// <summary>Lex State array. </summary>
+		public static readonly int[] jjnewLexState = new int[] {
+            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1, -1, -1, -1, -1, -1, 
+            1, 1, 2, -1, 2, 2, -1, -1
+        };
+        internal static readonly ulong[] jjtoToken = new ulong[] { 0x1ffffff01L };
+        internal static readonly long[] jjtoSkip = new long[] { 0x80L };
+		protected internal ICharStream input_stream;
+		private uint[] jjrounds = new uint[49];
+		private int[] jjstateSet = new int[98];
+		protected internal char curChar;
+		/// <summary>Constructor. </summary>
+		public QueryParserTokenManager(ICharStream stream)
+		{
+			InitBlock();
+			input_stream = stream;
+		}
+		
+		/// <summary>Constructor. </summary>
+		public QueryParserTokenManager(ICharStream stream, int lexState):this(stream)
+		{
+			SwitchTo(lexState);
+		}
+		
+		/// <summary>Reinitialise parser. </summary>
+		public virtual void  ReInit(ICharStream stream)
+		{
+			jjmatchedPos = jjnewStateCnt = 0;
+			curLexState = defaultLexState;
+			input_stream = stream;
+			ReInitRounds();
+		}
+		private void  ReInitRounds()
+		{
+			int i;
+			jjround = 0x80000001;
+			for (i = 49; i-- > 0; )
+				jjrounds[i] = 0x80000000;
+		}
+		
+		/// <summary>Reinitialise parser. </summary>
+		public virtual void  ReInit(ICharStream stream, int lexState)
+		{
+			ReInit(stream);
+			SwitchTo(lexState);
+		}
+		
+		/// <summary>Switch to specified lex state. </summary>
+		public virtual void  SwitchTo(int lexState)
+		{
+			if (lexState >= 3 || lexState < 0)
+				throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
+			else
+				curLexState = lexState;
+		}
+		
+		protected internal virtual Token JjFillToken()
+		{
+			Token t;
+			System.String curTokenImage;
+			int beginLine;
+			int endLine;
+			int beginColumn;
+			int endColumn;
+			System.String im = jjstrLiteralImages[jjmatchedKind];
+			curTokenImage = (im == null)?input_stream.Image:im;
+			beginLine = input_stream.BeginLine;
+			beginColumn = input_stream.BeginColumn;
+			endLine = input_stream.EndLine;
+			endColumn = input_stream.EndColumn;
+			t = Token.NewToken(jjmatchedKind, curTokenImage);
+			
+			t.beginLine = beginLine;
+			t.endLine = endLine;
+			t.beginColumn = beginColumn;
+			t.endColumn = endColumn;
+			
+			return t;
+		}
+		
+		internal int curLexState = 2;
+		internal int defaultLexState = 2;
+		internal int jjnewStateCnt;
+		internal uint jjround;
+		internal int jjmatchedPos;
+		internal int jjmatchedKind;
+		
+		/// <summary>Get the next Token. </summary>
+        [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
+        public virtual Token GetNextToken()
+		{
+			Token matchedToken;
+			int curPos = 0;
+			
+			for (; ; )
+			{
+				try
+				{
+					curChar = input_stream.BeginToken();
+				}
+				catch (IOException)
+				{
+					jjmatchedKind = 0;
+					matchedToken = JjFillToken();
+					return matchedToken;
+				}
+				
+				switch (curLexState)
+				{
+					
+					case 0: 
+						jjmatchedKind = 0x7fffffff;
+						jjmatchedPos = 0;
+						curPos = JjMoveStringLiteralDfa0_0();
+						break;
+					
+					case 1: 
+						jjmatchedKind = 0x7fffffff;
+						jjmatchedPos = 0;
+						curPos = JjMoveStringLiteralDfa0_1();
+						break;
+					
+					case 2: 
+						jjmatchedKind = 0x7fffffff;
+						jjmatchedPos = 0;
+						curPos = JjMoveStringLiteralDfa0_2();
+						break;
+					}
+				if (jjmatchedKind != 0x7fffffff)
+				{
+					if (jjmatchedPos + 1 < curPos)
+						input_stream.Backup(curPos - jjmatchedPos - 1);
+					if ((jjtoToken[jjmatchedKind >> 6] & ((ulong) 1L << (jjmatchedKind & 63))) != (ulong) 0L)
+					{
+						matchedToken = JjFillToken();
+						if (jjnewLexState[jjmatchedKind] != - 1)
+							curLexState = jjnewLexState[jjmatchedKind];
+						return matchedToken;
+					}
+					else
+					{
+						if (jjnewLexState[jjmatchedKind] != - 1)
+							curLexState = jjnewLexState[jjmatchedKind];
+						goto EOFLoop;
+					}
+				}
+				int error_line = input_stream.EndLine;
+				int error_column = input_stream.EndColumn;
+				System.String error_after = null;
+				bool EOFSeen = false;
+				try
+				{
+					input_stream.ReadChar(); input_stream.Backup(1);
+				}
+				catch (IOException)
+				{
+					EOFSeen = true;
+					error_after = curPos <= 1?"":input_stream.Image;
+					if (curChar == '\n' || curChar == '\r')
+					{
+						error_line++;
+						error_column = 0;
+					}
+					else
+						error_column++;
+				}
+				if (!EOFSeen)
+				{
+					input_stream.Backup(1);
+					error_after = curPos <= 1?"":input_stream.Image;
+				}
+				throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
+
+EOFLoop: ;
+			}
+		}
+		
+		private void  JjCheckNAdd(int state)
+		{
+			if (jjrounds[state] != jjround)
+			{
+				jjstateSet[jjnewStateCnt++] = state;
+				jjrounds[state] = jjround;
+			}
+		}
+		private void  JjAddStates(int start, int end)
+		{
+			do 
+			{
+				jjstateSet[jjnewStateCnt++] = jjnextStates[start];
+			}
+			while (start++ != end);
+		}
+		private void  JjCheckNAddTwoStates(int state1, int state2)
+		{
+			JjCheckNAdd(state1);
+			JjCheckNAdd(state2);
+		}
+		
+		private void  JjCheckNAddStates(int start, int end)
+		{
+			do 
+			{
+				JjCheckNAdd(jjnextStates[start]);
+			}
+			while (start++ != end);
+		}
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/Token.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/Token.cs b/src/Lucene.Net.QueryParser/Classic/Token.cs
new file mode 100644
index 0000000..389e7b3
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/Token.cs
@@ -0,0 +1,142 @@
+using System;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+	
+	/// <summary> 
+    /// Describes the input token stream.
+    /// </summary>
+    [Serializable]
+	public class Token
+	{
+		
+		/// <summary> 
+        /// An integer that describes the kind of this token.  This numbering
+		/// system is determined by JavaCCParser, and a table of these numbers is
+		/// stored in the file ...Constants.java.
+		/// </summary>
+		public int kind;
+		
+		/// <summary>The line number of the first character of this Token. </summary>
+		public int beginLine;
+		/// <summary>The column number of the first character of this Token. </summary>
+		public int beginColumn;
+		/// <summary>The line number of the last character of this Token. </summary>
+		public int endLine;
+		/// <summary>The column number of the last character of this Token. </summary>
+		public int endColumn;
+		
+		/// <summary>The string image of the token.</summary>
+		public string image;
+		
+		/// <summary> 
+        /// A reference to the next regular (non-special) token from the input
+		/// stream.  If this is the last token from the input stream, or if the
+		/// token manager has not read tokens beyond this one, this field is
+		/// set to null.  This is true only if this token is also a regular
+		/// token.  Otherwise, see below for a description of the contents of
+		/// this field.
+		/// </summary>
+		public Token next;
+		
+		/// <summary> 
+        /// This field is used to access special tokens that occur prior to this
+		/// token, but after the immediately preceding regular (non-special) token.
+		/// If there are no such special tokens, this field is set to null.
+		/// When there are more than one such special token, this field refers
+		/// to the last of these special tokens, which in turn refers to the next
+		/// previous special token through its specialToken field, and so on
+		/// until the first special token (whose specialToken field is null).
+		/// The next fields of special tokens refer to other special tokens that
+		/// immediately follow it (without an intervening regular token).  If there
+		/// is no such token, this field is null.
+		/// </summary>
+		public Token specialToken;
+
+	    /// <summary> 
+        /// An optional attribute value of the Token.
+	    /// Tokens which are not used as syntactic sugar will often contain
+	    /// meaningful values that will be used later on by the compiler or
+	    /// interpreter. This attribute value is often different from the image.
+	    /// Any subclass of Token that actually wants to return a non-null value can
+	    /// override this method as appropriate.
+	    /// </summary>
+	    public virtual object Value
+	    {
+	        get { return null; }
+	    }
+
+	    /// <summary> 
+        /// No-argument constructor
+        /// </summary>
+		public Token()
+		{
+		}
+		
+		/// <summary> 
+        /// Constructs a new token for the specified Image.
+        /// </summary>
+		public Token(int kind)
+            : this(kind, null)
+		{
+		}
+		
+		/// <summary> 
+        /// Constructs a new token for the specified Image and Kind.
+        /// </summary>
+		public Token(int kind, string image)
+		{
+			this.kind = kind;
+			this.image = image;
+		}
+		
+		/// <summary> 
+        /// Returns the image.
+        /// </summary>
+		public override string ToString()
+		{
+			return image;
+		}
+		
+		/// <summary> 
+        /// Returns a new Token object, by default. However, if you want, you
+		/// can create and return subclass objects based on the value of ofKind.
+		/// Simply add the cases to the switch for all those special cases.
+		/// For example, if you have a subclass of Token called IDToken that
+		/// you want to create if ofKind is ID, simply add something like :
+		/// 
+		/// case MyParserConstants.ID : return new IDToken(ofKind, image);
+		/// 
+		/// to the following switch statement. Then you can cast matchedToken
+		/// variable to the appropriate type and use sit in your lexical actions.
+		/// </summary>
+		public static Token NewToken(int ofKind, string image)
+		{
+			switch (ofKind)
+			{
+				default:  return new Token(ofKind, image);
+			}
+		}
+		
+		public static Token NewToken(int ofKind)
+		{
+			return NewToken(ofKind, null);
+		}
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs b/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
new file mode 100644
index 0000000..2f69e13
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
@@ -0,0 +1,170 @@
+using System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+	/// <summary>Token Manager Error. </summary>
+	[Serializable]
+	public class TokenMgrError : Exception
+	{
+        /*
+		* Ordinals for various reasons why an Error of this type can be thrown.
+		*/
+
+        /// <summary> Lexical error occurred.</summary>
+        internal const int LEXICAL_ERROR = 0;
+
+        /// <summary> An attempt was made to create a second instance of a static token manager.</summary>
+        internal const int STATIC_LEXER_ERROR = 1;
+
+        /// <summary> Tried to change to an invalid lexical state.</summary>
+        internal const int INVALID_LEXICAL_STATE = 2;
+
+        /// <summary> Detected (and bailed out of) an infinite loop in the token manager.</summary>
+        internal const int LOOP_DETECTED = 3;
+
+        /// <summary> Indicates the reason why the exception is thrown. It will have
+        /// one of the above 4 values.
+        /// </summary>
+        internal int errorCode;
+
+        /// <summary> 
+        /// Replaces unprintable characters by their escaped (or unicode escaped)
+        /// equivalents in the given string
+        /// </summary>
+        protected internal static string AddEscapes(string str)
+        {
+            StringBuilder retval = new StringBuilder();
+            char ch;
+            for (int i = 0; i < str.Length; i++)
+            {
+                switch (str[i])
+                {
+
+                    case (char)(0):
+                        continue;
+
+                    case '\b':
+                        retval.Append("\\b");
+                        continue;
+
+                    case '\t':
+                        retval.Append("\\t");
+                        continue;
+
+                    case '\n':
+                        retval.Append("\\n");
+                        continue;
+
+                    case '\f':
+                        retval.Append("\\f");
+                        continue;
+
+                    case '\r':
+                        retval.Append("\\r");
+                        continue;
+
+                    case '\"':
+                        retval.Append("\\\"");
+                        continue;
+
+                    case '\'':
+                        retval.Append("\\\'");
+                        continue;
+
+                    case '\\':
+                        retval.Append("\\\\");
+                        continue;
+
+                    default:
+                        if ((ch = str[i]) < 0x20 || ch > 0x7e)
+                        {
+                            string s = "0000" + Convert.ToString(ch, 16);
+                            retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
+                        }
+                        else
+                        {
+                            retval.Append(ch);
+                        }
+                        continue;
+
+                }
+            }
+            return retval.ToString();
+        }
+
+        /// <summary>
+        /// Returns a detailed message for the Error when it is thrown by the
+        /// token manager to indicate a lexical error.
+        /// </summary>
+        /// <remarks>You can customize the lexical error message by modifying this method.</remarks>
+        /// <param name="EOFSeen">indicates if EOF caused the lexical error</param>
+        /// <param name="lexState">lexical state in which this error occurred</param>
+        /// <param name="errorLine">line number when the error occurred</param>
+        /// <param name="errorColumn">column number when the error occurred</param>
+        /// <param name="errorAfter">prefix that was seen before this error occurred</param>
+        /// <param name="curChar">the offending character</param>
+        /// <returns>Detailed error message</returns>
+        protected internal static string LexicalError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar)
+        {
+            return ("Lexical error at line " +
+                errorLine + ", column " +
+                errorColumn + ".  Encountered: " +
+                (EOFSeen ? "<EOF> " : ("\"" + AddEscapes(Convert.ToString(curChar)) + "\"") + " (" + (int)curChar + "), ") +
+                "after : \"" + AddEscapes(errorAfter) + "\"");
+        }
+
+		/// <summary> 
+        /// You can also modify the body of this method to customize your error messages.
+		/// For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
+		/// of end-users concern, so you can return something like :
+		/// 
+		/// "Internal Error : Please file a bug report .... "
+		/// 
+		/// from this method for such cases in the release version of your parser.
+		/// </summary>
+		public override string Message
+		{
+			get { return base.Message; }
+		}
+		
+		/*
+		* Constructors of various flavors follow.
+		*/
+		
+		/// <summary>No arg constructor. </summary>
+		public TokenMgrError()
+		{
+		}
+		
+		/// <summary>Constructor with message and reason. </summary>
+		public TokenMgrError(string message, int reason)
+            : base(message)
+		{
+			errorCode = reason;
+		}
+		
+		/// <summary>Full Constructor. </summary>
+		public TokenMgrError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar, int reason)
+            : this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason)
+		{
+		}
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
new file mode 100644
index 0000000..0ac7c5b
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
@@ -0,0 +1,468 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+using Lucene.Net.Search.Spans;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.ComplexPhrase
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// QueryParser which permits complex phrase query syntax eg "(john jon
+    /// jonathan~) peters*".
+    /// <p>
+    /// Performs potentially multiple passes over Query text to parse any nested
+    /// logic in PhraseQueries. - First pass takes any PhraseQuery content between
+    /// quotes and stores for subsequent pass. All other query content is parsed as
+    /// normal - Second pass parses any stored PhraseQuery content, checking all
+    /// embedded clauses are referring to the same field and therefore can be
+    /// rewritten as Span queries. All PhraseQuery clauses are expressed as
+    /// ComplexPhraseQuery objects
+    /// </p>
+    /// <p>
+    /// This could arguably be done in one pass using a new QueryParser but here I am
+    /// working within the constraints of the existing parser as a base class. This
+    /// currently simply feeds all phrase content through an analyzer to select
+    /// phrase terms - any "special" syntax such as * ~ * etc are not given special
+    /// status
+    /// </p>
+    /// </summary>
+    public class ComplexPhraseQueryParser : Classic.QueryParser
+    {
+        // Phrases recorded during the first parsing pass; each one is re-parsed
+        // individually during the second pass (see Parse).
+        private List<ComplexPhraseQuery> complexPhrases = null;
+
+        // True while this parser is performing the second pass that resolves the
+        // stored phrase contents. NOTE(review): this instance state makes the
+        // parser non-reentrant, consistent with QueryParser not being threadsafe.
+        private bool isPass2ResolvingPhrases;
+
+        /// <summary>
+        /// When <code>inOrder</code> is true, the search terms must
+        /// exist in the documents in the same order as in the query.
+        /// Choose between ordered (true) or un-ordered (false) proximity search.
+        /// </summary>
+        public bool InOrder { get; set; }
+
+        // The phrase currently being re-parsed during pass 2; used to verify that
+        // every clause inside a phrase refers to the same field.
+        private ComplexPhraseQuery currentPhraseQuery = null;
+
+        /// <summary>
+        /// Creates a parser for the given default field and analyzer.
+        /// Ordered proximity search (<see cref="InOrder"/>) is enabled by default.
+        /// </summary>
+        public ComplexPhraseQueryParser(LuceneVersion matchVersion, string f, Analyzer a)
+            : base(matchVersion, f, a)
+        {
+            // set property defaults
+            this.InOrder = true;
+        }
+
+        /// <summary>
+        /// Pass-1 hook: instead of building a PhraseQuery, records the quoted
+        /// content as a <see cref="ComplexPhraseQuery"/> placeholder whose contents
+        /// are parsed later during pass 2.
+        /// </summary>
+        protected internal override Query GetFieldQuery(string field, string queryText, int slop)
+        {
+            ComplexPhraseQuery cpq = new ComplexPhraseQuery(field, queryText, slop, InOrder);
+            complexPhrases.Add(cpq); // add to list of phrases to be parsed once
+            // we
+            // are through with this pass
+            return cpq;
+        }
+
+        /// <summary>
+        /// Parses the query in two passes: pass 1 parses the top-level query and
+        /// records any quoted phrase content; pass 2 re-parses each recorded
+        /// phrase (via <see cref="ComplexPhraseQuery.ParsePhraseElements"/>) so it
+        /// can later be rewritten into Span queries.
+        /// </summary>
+        public override Query Parse(string query)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                MultiTermQuery.RewriteMethod oldMethod = MultiTermRewriteMethod;
+                try
+                {
+                    // Temporarily force BooleanQuery rewrite so that Parser will
+                    // generate visible
+                    // collection of terms which we can convert into SpanQueries.
+                    // ConstantScoreRewrite mode produces an
+                    // opaque ConstantScoreQuery object which cannot be interrogated for
+                    // terms in the same way a BooleanQuery can.
+                    // QueryParser is not guaranteed threadsafe anyway so this temporary
+                    // state change should not
+                    // present an issue
+                    MultiTermRewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
+                    return base.Parse(query);
+                }
+                finally
+                {
+                    MultiTermRewriteMethod = oldMethod;
+                }
+            }
+
+            // First pass - parse the top-level query recording any PhraseQuerys
+            // which will need to be resolved
+            complexPhrases = new List<ComplexPhraseQuery>();
+            Query q = base.Parse(query);
+
+            // Perform second pass, using this QueryParser to parse any nested
+            // PhraseQueries with different
+            // set of syntax restrictions (i.e. all fields must be same)
+            isPass2ResolvingPhrases = true;
+            try
+            {
+                foreach (var currentPhraseQuery in complexPhrases)
+                {
+                    this.currentPhraseQuery = currentPhraseQuery;
+                    // in each phrase, now parse the contents between quotes as a
+                    // separate parse operation
+                    currentPhraseQuery.ParsePhraseElements(this);
+                }
+            }
+            finally
+            {
+                isPass2ResolvingPhrases = false;
+            }
+            return q;
+        }
+
+        // There is No "getTermQuery throws ParseException" method to override so
+        // unfortunately need
+        // to throw a runtime exception here if a term for another field is embedded
+        // in phrase query
+        protected override Query NewTermQuery(Term term)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                try
+                {
+                    CheckPhraseClauseIsForSameField(term.Field);
+                }
+                catch (ParseException pe)
+                {
+                    throw new Exception("Error parsing complex phrase", pe);
+                }
+            }
+            return base.NewTermQuery(term);
+        }
+
+        // Helper method used to report on any clauses that appear in query syntax
+        private void CheckPhraseClauseIsForSameField(string field)
+        {
+            if (!field.Equals(currentPhraseQuery.Field))
+            {
+                throw new ParseException("Cannot have clause for field \"" + field
+                    + "\" nested in phrase " + " for field \"" + currentPhraseQuery.Field
+                    + "\"");
+            }
+        }
+
+        /// <summary>Pass-2 check that wildcard clauses stay within the phrase's field.</summary>
+        protected internal override Query GetWildcardQuery(string field, string termStr)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                CheckPhraseClauseIsForSameField(field);
+            }
+            return base.GetWildcardQuery(field, termStr);
+        }
+
+        /// <summary>Pass-2 check that range clauses stay within the phrase's field.</summary>
+        protected internal override Query GetRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                CheckPhraseClauseIsForSameField(field);
+            }
+            return base.GetRangeQuery(field, part1, part2, startInclusive, endInclusive);
+        }
+
+        /// <summary>
+        /// In pass 2, builds a range query forced to rewrite to a BooleanQuery so
+        /// that its terms can later be converted into a SpanOr clause.
+        /// </summary>
+        protected internal override Query NewRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                // Must use old-style RangeQuery in order to produce a BooleanQuery
+                // that can be turned into SpanOr clause
+                TermRangeQuery rangeQuery = TermRangeQuery.NewStringRange(field, part1, part2, startInclusive, endInclusive);
+                rangeQuery.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+                return rangeQuery;
+            }
+            return base.NewRangeQuery(field, part1, part2, startInclusive, endInclusive);
+        }
+
+        /// <summary>Pass-2 check that fuzzy clauses stay within the phrase's field.</summary>
+        protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+        {
+            if (isPass2ResolvingPhrases)
+            {
+                CheckPhraseClauseIsForSameField(field);
+            }
+            return base.GetFuzzyQuery(field, termStr, minSimilarity);
+        }
+
+        /// <summary>
+        /// Used to handle the query content in between quotes and produced Span-based
+        /// interpretations of the clauses.
+        /// </summary>
+        public class ComplexPhraseQuery : Query
+        {
+            private readonly string field;
+            private readonly string phrasedQueryStringContents;
+            private readonly int slopFactor;
+            private readonly bool inOrder;
+            // Populated by ParsePhraseElements; consumed by Rewrite.
+            private Query contents;
+
+            /// <summary>
+            /// Creates a placeholder for quoted phrase content; the contents are
+            /// parsed later via <see cref="ParsePhraseElements"/>.
+            /// </summary>
+            public ComplexPhraseQuery(string field, string phrasedQueryStringContents,
+                int slopFactor, bool inOrder)
+            {
+                this.field = field;
+                this.phrasedQueryStringContents = phrasedQueryStringContents;
+                this.slopFactor = slopFactor;
+                this.inOrder = inOrder;
+            }
+
+            /// <summary>The field that all clauses inside this phrase must refer to.</summary>
+            public string Field
+            {
+                get { return field; }
+            }
+
+            // Called by ComplexPhraseQueryParser for each phrase after the main
+            // parse
+            // thread is through
+            protected internal void ParsePhraseElements(ComplexPhraseQueryParser qp)
+            {
+                // TODO ensure that field-sensitivity is preserved ie the query
+                // string below is parsed as
+                // field+":("+phrasedQueryStringContents+")"
+                // but this will need code in rewrite to unwrap the first layer of
+                // boolean query
+
+                string oldDefaultParserField = qp.Field;
+                try
+                {
+                    //temporarily set the QueryParser to be parsing the default field for this phrase e.g author:"fred* smith"
+                    qp.field = this.field;
+                    contents = qp.Parse(phrasedQueryStringContents);
+                }
+                finally
+                {
+                    qp.field = oldDefaultParserField;
+                }
+            }
+
+            /// <summary>
+            /// Converts the parsed phrase contents into an equivalent Span query:
+            /// a TermQuery is returned unchanged; a BooleanQuery is translated
+            /// clause-by-clause into a SpanNearQuery, with MUST_NOT clauses
+            /// expressed via a SpanNotQuery. Any other content type causes an
+            /// ArgumentException.
+            /// </summary>
+            public override Query Rewrite(IndexReader reader)
+            {
+                // ArrayList spanClauses = new ArrayList();
+                if (contents is TermQuery)
+                {
+                    return contents;
+                }
+                // Build a sequence of Span clauses arranged in a SpanNear - child
+                // clauses can be complex
+                // Booleans e.g. nots and ors etc
+                int numNegatives = 0;
+                if (!(contents is BooleanQuery))
+                {
+                    throw new ArgumentException("Unknown query type \""
+                        + contents.GetType().Name
+                        + "\" found in phrase query string \"" + phrasedQueryStringContents
+                        + "\"");
+                }
+                BooleanQuery bq = (BooleanQuery)contents;
+                BooleanClause[] bclauses = bq.Clauses;
+                SpanQuery[] allSpanClauses = new SpanQuery[bclauses.Length];
+                // For all clauses e.g. one* two~
+                for (int i = 0; i < bclauses.Length; i++)
+                {
+                    // HashSet bclauseterms=new HashSet();
+                    Query qc = bclauses[i].Query;
+                    // Rewrite this clause e.g one* becomes (one OR onerous)
+                    qc = qc.Rewrite(reader);
+                    if (bclauses[i].Occur_.Equals(BooleanClause.Occur.MUST_NOT))
+                    {
+                        numNegatives++;
+                    }
+
+                    if (qc is BooleanQuery)
+                    {
+                        List<SpanQuery> sc = new List<SpanQuery>();
+                        AddComplexPhraseClause(sc, (BooleanQuery)qc);
+                        if (sc.Count > 0)
+                        {
+                            allSpanClauses[i] = sc.ElementAt(0);
+                        }
+                        else
+                        {
+                            // Insert fake term e.g. phrase query was for "Fred Smithe*" and
+                            // there were no "Smithe*" terms - need to
+                            // prevent match on just "Fred".
+                            allSpanClauses[i] = new SpanTermQuery(new Term(field,
+                                "Dummy clause because no terms found - must match nothing"));
+                        }
+                    }
+                    else
+                    {
+                        if (qc is TermQuery)
+                        {
+                            TermQuery tq = (TermQuery)qc;
+                            allSpanClauses[i] = new SpanTermQuery(tq.Term);
+                        }
+                        else
+                        {
+                            throw new ArgumentException("Unknown query type \""
+                                + qc.GetType().Name
+                                + "\" found in phrase query string \""
+                                + phrasedQueryStringContents + "\"");
+                        }
+
+                    }
+                }
+                if (numNegatives == 0)
+                {
+                    // The simple case - no negative elements in phrase
+                    return new SpanNearQuery(allSpanClauses, slopFactor, inOrder);
+                }
+                // Complex case - we have mixed positives and negatives in the
+                // sequence.
+                // Need to return a SpanNotQuery
+                List<SpanQuery> positiveClauses = new List<SpanQuery>();
+                for (int j = 0; j < allSpanClauses.Length; j++)
+                {
+                    if (!bclauses[j].Occur_.Equals(BooleanClause.Occur.MUST_NOT))
+                    {
+                        positiveClauses.Add(allSpanClauses[j]);
+                    }
+                }
+
+                SpanQuery[] includeClauses = positiveClauses
+                    .ToArray();
+
+                SpanQuery include = null;
+                if (includeClauses.Length == 1)
+                {
+                    include = includeClauses[0]; // only one positive clause
+                }
+                else
+                {
+                    // need to increase slop factor based on gaps introduced by
+                    // negatives
+                    include = new SpanNearQuery(includeClauses, slopFactor + numNegatives,
+                        inOrder);
+                }
+                // Use sequence of positive and negative values as the exclude.
+                SpanNearQuery exclude = new SpanNearQuery(allSpanClauses, slopFactor,
+                    inOrder);
+                SpanNotQuery snot = new SpanNotQuery(include, exclude);
+                return snot;
+            }
+
+            /// <summary>
+            /// Recursively converts a nested BooleanQuery clause into SpanOr /
+            /// SpanNot clauses, separating positive children (ors) from MUST_NOT
+            /// children (nots). If no positive clauses are found, nothing is added.
+            /// </summary>
+            private void AddComplexPhraseClause(List<SpanQuery> spanClauses, BooleanQuery qc)
+            {
+                List<SpanQuery> ors = new List<SpanQuery>();
+                List<SpanQuery> nots = new List<SpanQuery>();
+                BooleanClause[] bclauses = qc.Clauses;
+
+                // For all clauses e.g. one* two~
+                for (int i = 0; i < bclauses.Length; i++)
+                {
+                    Query childQuery = bclauses[i].Query;
+
+                    // select the list to which we will add these options
+                    List<SpanQuery> chosenList = ors;
+                    if (bclauses[i].Occur_ == BooleanClause.Occur.MUST_NOT)
+                    {
+                        chosenList = nots;
+                    }
+
+                    if (childQuery is TermQuery)
+                    {
+                        TermQuery tq = (TermQuery)childQuery;
+                        SpanTermQuery stq = new SpanTermQuery(tq.Term);
+                        stq.Boost = tq.Boost;
+                        chosenList.Add(stq);
+                    }
+                    else if (childQuery is BooleanQuery)
+                    {
+                        BooleanQuery cbq = (BooleanQuery)childQuery;
+                        AddComplexPhraseClause(chosenList, cbq);
+                    }
+                    else
+                    {
+                        // LUCENETODO alternatively could call extract terms here?
+                        throw new ArgumentException("Unknown query type:"
+                            + childQuery.GetType().Name);
+                    }
+                }
+                if (ors.Count == 0)
+                {
+                    return;
+                }
+                SpanOrQuery soq = new SpanOrQuery(ors
+                    .ToArray());
+                if (nots.Count == 0)
+                {
+                    spanClauses.Add(soq);
+                }
+                else
+                {
+                    SpanOrQuery snqs = new SpanOrQuery(nots
+                        .ToArray());
+                    SpanNotQuery snq = new SpanNotQuery(soq, snqs);
+                    spanClauses.Add(snq);
+                }
+            }
+
+            public override string ToString(string field)
+            {
+                return "\"" + phrasedQueryStringContents + "\"";
+            }
+
+            public override int GetHashCode()
+            {
+                int prime = 31;
+                int result = base.GetHashCode();
+                result = prime * result + ((field == null) ? 0 : field.GetHashCode());
+                result = prime
+                    * result
+                    + ((phrasedQueryStringContents == null) ? 0
+                        : phrasedQueryStringContents.GetHashCode());
+                result = prime * result + slopFactor;
+                result = prime * result + (inOrder ? 1 : 0);
+                return result;
+            }
+
+            public override bool Equals(object obj)
+            {
+                if (this == obj)
+                    return true;
+                if (obj == null)
+                    return false;
+                if (GetType() != obj.GetType())
+                    return false;
+                if (!base.Equals(obj))
+                {
+                    return false;
+                }
+                ComplexPhraseQuery other = (ComplexPhraseQuery)obj;
+                if (field == null)
+                {
+                    if (other.field != null)
+                        return false;
+                }
+                else if (!field.Equals(other.field))
+                    return false;
+                if (phrasedQueryStringContents == null)
+                {
+                    if (other.phrasedQueryStringContents != null)
+                        return false;
+                }
+                else if (!phrasedQueryStringContents
+                  .Equals(other.phrasedQueryStringContents))
+                    return false;
+                if (slopFactor != other.slopFactor)
+                    return false;
+                return inOrder == other.inOrder;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs b/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
new file mode 100644
index 0000000..6418f87
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
@@ -0,0 +1,131 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using System;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// The <see cref="ExtendableQueryParser"/> enables arbitrary query parser extension
+    /// based on a customizable field naming scheme. The lucene query syntax allows
+    /// implicit and explicit field definitions as query prefix followed by a colon
+    /// (':') character. The <see cref="ExtendableQueryParser"/> allows to encode extension
+    /// keys into the field symbol associated with a registered instance of
+    /// <see cref="ParserExtension"/>. A customizable separation character separates the
+    /// extension key from the actual field symbol. The <see cref="ExtendableQueryParser"/>
+    /// splits (<see cref="Extensions.SplitExtensionField(String, String)"/>) the
+    /// extension key from the field symbol and tries to resolve the associated
+    /// <see cref="ParserExtension"/>. If the parser can't resolve the key or the field
+    /// token does not contain a separation character, <see cref="ExtendableQueryParser"/>
+    /// yields the same behavior as its super class <see cref="QueryParser"/>. Otherwise,
+    /// if the key is associated with a <see cref="ParserExtension"/> instance, the parser
+    /// builds an instance of <see cref="ExtensionQuery"/> to be processed by
+    /// <see cref="ParserExtension.Parse(ExtensionQuery)"/>. If an extension field does not
+    /// contain a field part the default field for the query will be used.
+    /// <p>
+    /// To guarantee that an extension field is processed with its associated
+    /// extension, the extension query part must escape any special characters like
+    /// '*' or '['. If the extension query contains any whitespace characters, the
+    /// extension query part must be enclosed in quotes.
+    /// Example ('_' used as separation character):
+    /// <pre>
+    ///   title_customExt:"Apache Lucene\?" OR content_customExt:prefix\*
+    /// </pre>
+    /// 
+    /// Search on the default field:
+    /// <pre>
+    ///   _customExt:"Apache Lucene\?" OR _customExt:prefix\*
+    /// </pre>
+    /// </p>
+    /// <p>
+    /// The <see cref="ExtendableQueryParser"/> itself does not implement the logic how
+    /// field and extension key are separated or ordered. All logic regarding the
+    /// extension key and field symbol parsing is located in <see cref="Extensions"/>.
+    /// Customized extension schemes should be implemented by sub-classing
+    /// <see cref="Extensions"/>.
+    /// </p>
+    /// <p>
+    /// For details about the default encoding scheme see <see cref="Extensions"/>.
+    /// </p>
+    /// 
+    /// <see cref="Extensions"/>
+    /// <see cref="ParserExtension"/>
+    /// <see cref="ExtensionQuery"/>
+    /// </summary>
+    public class ExtendableQueryParser : Classic.QueryParser
+    {
+        private readonly string defaultField;
+        private readonly Extensions extensions;
+
+        /// <summary>
+        ///  Default empty extensions instance
+        /// </summary>
+        private static readonly Extensions DEFAULT_EXTENSION = new Extensions();
+
+        /// <summary>
+        /// Creates a new <see cref="ExtendableQueryParser"/> instance
+        /// </summary>
+        /// <param name="matchVersion">the lucene version to use.</param>
+        /// <param name="f">the default query field</param>
+        /// <param name="a">the analyzer used to find terms in a query string</param>
+        public ExtendableQueryParser(LuceneVersion matchVersion, string f, Analyzer a)
+            : this(matchVersion, f, a, DEFAULT_EXTENSION)
+        {
+            // Chain to the four-argument constructor so that defaultField and
+            // extensions are always initialized (matches the Lucene Java original).
+            // Calling base(...) directly left extensions null, which caused a
+            // NullReferenceException in GetFieldQuery.
+        }
+
+        /// <summary>
+        /// Creates a new <see cref="ExtendableQueryParser"/> instance
+        /// </summary>
+        /// <param name="matchVersion">the lucene version to use.</param>
+        /// <param name="f">the default query field</param>
+        /// <param name="a">the analyzer used to find terms in a query string</param>
+        /// <param name="ext">the query parser extensions</param>
+        public ExtendableQueryParser(LuceneVersion matchVersion, string f, Analyzer a, Extensions ext)
+            : base(matchVersion, f, a)
+        {
+            this.defaultField = f;
+            this.extensions = ext;
+        }
+
+        /// <summary>
+        /// Returns the extension field delimiter character.
+        /// </summary>
+        /// <returns>the extension field delimiter character.</returns>
+        public char ExtensionFieldDelimiter
+        {
+            get { return extensions.ExtensionFieldDelimiter; }
+        }
+
+        /// <summary>
+        /// Splits the field into (field, extension key); if a registered
+        /// <see cref="ParserExtension"/> matches the key, delegates to it, otherwise
+        /// falls back to the base <see cref="Classic.QueryParser"/> behavior.
+        /// </summary>
+        protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
+        {
+            Tuple<string, string> splitExtensionField = this.extensions
+                .SplitExtensionField(defaultField, field);
+            ParserExtension extension = this.extensions
+                .GetExtension(splitExtensionField.Item2);
+            if (extension != null)
+            {
+                return extension.Parse(new ExtensionQuery(this, splitExtensionField.Item1,
+                    queryText));
+            }
+            return base.GetFieldQuery(field, queryText, quoted);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs b/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
new file mode 100644
index 0000000..610e4ad
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
@@ -0,0 +1,54 @@
+\ufeffnamespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// <see cref="ExtensionQuery"/> is an immutable holder for the components
+    /// extracted from the original query string: the query field, the raw
+    /// extension query string, and the parser that produced them.
+    /// </summary>
+    public class ExtensionQuery
+    {
+        /// <summary>
+        /// Creates a new <see cref="ExtensionQuery"/>
+        /// </summary>
+        /// <param name="topLevelParser">the top-level parser that extracted this query</param>
+        /// <param name="field">the query field</param>
+        /// <param name="rawQueryString">the raw extension query string</param>
+        public ExtensionQuery(Classic.QueryParser topLevelParser, string field, string rawQueryString)
+        {
+            this.TopLevelParser = topLevelParser;
+            this.Field = field;
+            this.RawQueryString = rawQueryString;
+        }
+
+        /// <summary>
+        /// The query field this extension query applies to.
+        /// </summary>
+        public string Field { get; protected set; }
+
+        /// <summary>
+        /// The raw (unparsed) extension query string.
+        /// </summary>
+        public string RawQueryString { get; protected set; }
+
+        /// <summary>
+        /// The top-level parser which created this <see cref="ExtensionQuery"/>
+        /// </summary>
+        public Classic.QueryParser TopLevelParser { get; protected set; }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Ext/Extensions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Ext/Extensions.cs b/src/Lucene.Net.QueryParser/Ext/Extensions.cs
new file mode 100644
index 0000000..6895268
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Ext/Extensions.cs
@@ -0,0 +1,167 @@
+\ufeffusing Lucene.Net.QueryParser.Classic;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public class Extensions
+    {
+        private readonly IDictionary<string, ParserExtension> extensions = new Dictionary<string, ParserExtension>();
+        private readonly char extensionFieldDelimiter;
+
+        /// <summary>
+        /// The default extension field delimiter character. This constant is set to ':'
+        /// </summary>
+        public static readonly char DEFAULT_EXTENSION_FIELD_DELIMITER = ':';
+
+        /// <summary>
+        /// Creates a new <see cref="Extensions"/> instance with the
+        /// <see cref="DEFAULT_EXTENSION_FIELD_DELIMITER"/> as a delimiter character.
+        /// </summary>
+        public Extensions()
+            : this(DEFAULT_EXTENSION_FIELD_DELIMITER)
+        {
+        }
+
+        /// <summary>
+        /// Creates a new <see cref="Extensions"/> instance
+        /// </summary>
+        /// <param name="extensionFieldDelimiter">the extensions field delimiter character</param>
+        public Extensions(char extensionFieldDelimiter)
+        {
+            this.extensionFieldDelimiter = extensionFieldDelimiter;
+        }
+
+        /// <summary>
+        /// Adds a new <see cref="ParserExtension"/> instance associated with the given key.
+        /// </summary>
+        /// <param name="key">the parser extension key</param>
+        /// <param name="extension">the parser extension</param>
+        public virtual void Add(string key, ParserExtension extension)
+        {
+            this.extensions[key] = extension;
+        }
+
+        /// <summary>
+        /// Returns the <see cref="ParserExtension"/> instance for the given key or
+        /// <code>null</code> if no extension can be found for the key.
+        /// </summary>
+        /// <param name="key">the extension key</param>
+        /// <returns>the <see cref="ParserExtension"/> instance for the given key or
+        /// <code>null</code> if no extension can be found for the key.</returns>
+        public ParserExtension GetExtension(string key)
+        {
+            if (key == null || !this.extensions.ContainsKey(key)) return null;
+            return this.extensions[key];
+        }
+
+        /// <summary>
+        /// Returns the extension field delimiter
+        /// </summary>
+        public virtual char ExtensionFieldDelimiter
+        {
+            get { return extensionFieldDelimiter; }
+        }
+
+        /// <summary>
+        /// Splits an extension field and returns the field / extension part as a
+        /// <see cref="Tuple{String,String}"/>. This method tries to split on the first occurrence of the
+        /// extension field delimiter, if the delimiter is not present in the string
+        /// the result will contain a <code>null</code> value for the extension key and
+        /// the given field string as the field value. If the given extension field
+        /// string contains no field identifier the result pair will carry the given
+        /// default field as the field value.
+        /// </summary>
+        /// <param name="defaultField">the default query field</param>
+        /// <param name="field">the extension field string</param>
+        /// <returns>a <see cref="Tuple{String,String}"/> with the field name as the <see cref="Tuple{String,String}.Item1"/> and the
+        /// extension key as the <see cref="Tuple{String,String}.Item2"/></returns>
+        public Tuple<string, string> SplitExtensionField(string defaultField, string field)
+        {
+            int indexOf = field.IndexOf(this.extensionFieldDelimiter);
+            if (indexOf < 0)
+                return new Tuple<string, string>(field, null);
+            string indexField = indexOf == 0 ? defaultField : field.Substring(0, indexOf);
+            string extensionKey = field.Substring(indexOf + 1);
+            return new Tuple<string, string>(indexField, extensionKey);
+        }
+
+        /// <summary>
+        /// Escapes an extension field. The default implementation is equivalent to
+        /// <see cref="QueryParser.Escape(String)"/>.
+        /// </summary>
+        /// <param name="extfield">the extension field identifier</param>
+        /// <returns>the extension field identifier with all special chars escaped with
+        /// a backslash character.</returns>
+        public string EscapeExtensionField(string extfield)
+        {
+            return QueryParserBase.Escape(extfield);
+        }
+
+        /// <summary>
+        /// Builds an extension field string from a given extension key and the default
+        /// query field. The default field and the key are delimited with the extension
+        /// field delimiter character. This method makes no assumption about the order
+        /// of the extension key and the field. By default the extension key is
+        /// appended to the end of the returned string while the field is added to the
+        /// beginning. Special Query characters are escaped in the result.
+        /// <p>
+        /// Note: <see cref="Extensions"/> subclasses must maintain the contract between
+        /// <see cref="M:BuildExtensionField(String)"/> and
+        /// <see cref="M:BuildExtensionField(String, String)"/> where the latter inverts the
+        /// former.
+        /// </p>
+        /// </summary>
+        /// <param name="extensionKey">the extension key</param>
+        /// <returns>escaped extension field identifier</returns>
+        public string BuildExtensionField(string extensionKey)
+        {
+            return BuildExtensionField(extensionKey, "");
+        }
+
+        /// <summary>
+        /// Builds an extension field string from a given extension key and the default
+        /// query field. The default field and the key are delimited with the extension
+        /// field delimiter character. This method makes no assumption about the order
+        /// of the extension key and the field. By default the extension key is
+        /// appended to the end of the returned string while the field is added to the
+        /// beginning. Special Query characters are escaped in the result.
+        /// <p>
+        /// Note: <see cref="Extensions"/> subclasses must maintain the contract between
+        /// <see cref="M:BuildExtensionField(String)"/> and
+        /// <see cref="M:BuildExtensionField(String, String)"/> where the latter inverts the
+        /// former.</p>
+        /// </summary>
+        /// <param name="extensionKey">the extension key</param>
+        /// <param name="field">the field to apply the extension on.</param>
+        /// <returns>escaped extension field identifier</returns>
+        /// <remarks>See <see cref="M:BuildExtensionField(String)"/> to use the default query field</remarks>
+        public string BuildExtensionField(string extensionKey, string field)
+        {
+            StringBuilder builder = new StringBuilder(field);
+            builder.Append(this.extensionFieldDelimiter);
+            builder.Append(extensionKey);
+            return EscapeExtensionField(builder.ToString());
+        }
+
+        // NOTE: Pair<T, T> was eliminated in favor of the built in Tuple<T, T> type.
+    }
+}


[44/50] [abbrv] lucenenet git commit: Fixed RandomIndexWriter call because the overload that is supposed to be used was removed.

Posted by sy...@apache.org.
Fixed RandomIndexWriter call because the overload that is supposed to be used was removed.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/10dc873b
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/10dc873b
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/10dc873b

Branch: refs/heads/master
Commit: 10dc873b1b5d5c1f54e1c0e41ea8b03380864e1a
Parents: 2f07fa2
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Fri Sep 2 23:12:55 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 23:12:55 2016 +0700

----------------------------------------------------------------------
 .../Analyzing/TestAnalyzingQueryParser.cs                        | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/10dc873b/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
index 10756cf..77c4f9f 100644
--- a/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Analyzing/TestAnalyzingQueryParser.cs
@@ -310,7 +310,9 @@ namespace Lucene.Net.QueryParser.Analyzing
             int hits;
             using (Directory ramDir = NewDirectory())
             {
-                using (RandomIndexWriter writer = new RandomIndexWriter(Random(), ramDir, analyzer))
+                // LUCENENET TODO: It seems the overload this test depends on has been removed from the RandomIndexWriter
+                //using (RandomIndexWriter writer = new RandomIndexWriter(Random(), ramDir, analyzer))
+                using (RandomIndexWriter writer = new RandomIndexWriter(Random(), ramDir, NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, analyzer)))
                 {
                     Document doc = new Document();
                     FieldType fieldType = new FieldType();


[35/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/QueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParser.cs b/Lucene.Net.QueryParser/Classic/QueryParser.cs
deleted file mode 100644
index e86c716..0000000
--- a/Lucene.Net.QueryParser/Classic/QueryParser.cs
+++ /dev/null
@@ -1,921 +0,0 @@
-using Lucene.Net.Analysis;
-using Lucene.Net.Search;
-using Lucene.Net.Util;
-using System;
-using System.Collections.Generic;
-using System.IO;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary> This class is generated by JavaCC.  The most important method is
-    /// <see cref="Parse(String)" />.
-    /// 
-    /// The syntax for query strings is as follows:
-    /// A Query is a series of clauses.
-    /// A clause may be prefixed by:
-    /// <list type="bullet">
-    /// <item> a plus (<c>+</c>) or a minus (<c>-</c>) sign, indicating
-    /// that the clause is required or prohibited respectively; or</item>
-    /// <item> a term followed by a colon, indicating the field to be searched.
-    /// This enables one to construct queries which search multiple fields.</item>
-    /// </list>
-    /// 
-    /// A clause may be either:
-    /// <list type="bullet">
-    /// <item> a term, indicating all the documents that contain this term; or</item>
-    /// <item> a nested query, enclosed in parentheses.  Note that this may be used
-    /// with a <c>+</c>/<c>-</c> prefix to require any of a set of
-    /// terms.</item>
-    /// </list>
-    /// 
-    /// Thus, in BNF, the query grammar is:
-    /// <code>
-    /// Query  ::= ( Clause )*
-    /// Clause ::= ["+", "-"] [&lt;TERM&gt; ":"] ( &lt;TERM&gt; | "(" Query ")" )
-    /// </code>
-    /// 
-    /// <p/>
-    /// Examples of appropriately formatted queries can be found in the <a
-    /// href="../../../../../../queryparsersyntax.html">query syntax
-    /// documentation</a>.
-    /// <p/>
-    /// 
-    /// <p/>
-    /// In <see cref="TermRangeQuery" />s, QueryParser tries to detect date values, e.g.
-    /// <tt>date:[6/1/2005 TO 6/4/2005]</tt> produces a range query that searches
-    /// for "date" fields between 2005-06-01 and 2005-06-04. Note that the format
-    /// of the accepted input depends on the <see cref="Locale" />.
-    /// A <see cref="Lucene.Net.Documents.DateTools.Resolution" /> has to be set,
-    /// if you want to use <see cref="DateTools"/> for date conversion.<p/>
-    /// <p/>
-    /// The date resolution that shall be used for RangeQueries can be set
-    /// using <see cref="SetDateResolution(DateTools.Resolution)" />
-    /// or <see cref="SetDateResolution(String, DateTools.Resolution)" />. The former
-    /// sets the default date resolution for all fields, whereas the latter can
-    /// be used to set field specific date resolutions. Field specific date
-    /// resolutions take, if set, precedence over the default date resolution.
-    /// <p/>
-    /// <p/>
-    /// If you don't use <see cref="DateTools" /> in your index, you can create your own
-    /// query parser that inherits QueryParser and overwrites
-    /// <see cref="GetRangeQuery(String, String, String, bool)" /> to
-    /// use a different method for date conversion.
-    /// <p/>
-    /// 
-    /// <p/>Note that QueryParser is <em>not</em> thread-safe.<p/> 
-    /// 
-    /// <p/><b>NOTE</b>: there is a new QueryParser in contrib, which matches
-    /// the same syntax as this class, but is more modular,
-    /// enabling substantial customization to how a query is created.
-    /// 
-    /// <b>NOTE</b>: You must specify the required <see cref="LuceneVersion" /> compatibility when
-    /// creating QueryParser:
-    /// <list type="bullet">
-    /// <item>As of 3.1, <see cref="AutoGeneratePhraseQueries"/> is false by default.</item>
-    /// </list>
-    /// </summary>
-    public class QueryParser : QueryParserBase
-    {
-        // NOTE: This was moved into the QueryParserBase class.
-
-        ///* The default operator_Renamed for parsing queries. 
-        // * Use {@link QueryParser#setDefaultOperator} to change it.
-        // */
-
-        //public enum Operator
-        //{
-        //    OR,
-        //    AND
-        //}
-
-        /// <summary>
-        /// Constructs a query parser.
-        /// </summary>
-        /// <param name="matchVersion">Lucene version to match.</param>
-        /// <param name="f">the default field for query terms.</param>
-        /// <param name="a">used to find terms in the query text.</param>
-        public QueryParser(LuceneVersion matchVersion, string f, Analyzer a)
-            : this(new FastCharStream(new StringReader("")))
-        {
-            Init(matchVersion, f, a);
-        }
-
-        // *   Query  ::= ( Clause )*
-        // *   Clause ::= ["+", "-"] [<TermToken> ":"] ( <TermToken> | "(" Query ")" )
-        public int Conjunction()
-        {
-            int ret = CONJ_NONE;
-            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-            {
-                case RegexpToken.AND:
-                case RegexpToken.OR:
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.AND:
-                            Jj_consume_token(RegexpToken.AND);
-                            ret = CONJ_AND;
-                            break;
-                        case RegexpToken.OR:
-                            Jj_consume_token(RegexpToken.OR);
-                            ret = CONJ_OR;
-                            break;
-                        default:
-                            jj_la1[0] = jj_gen;
-                            Jj_consume_token(-1);
-                            throw new ParseException();
-                    }
-                    break;
-                default:
-                    jj_la1[1] = jj_gen;
-                    break;
-            }
-            {
-                if (true) return ret;
-            }
-            throw new ApplicationException("Missing return statement in function");
-        }
-
-        public int Modifiers()
-        {
-            int ret = MOD_NONE;
-            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-            {
-                case RegexpToken.NOT:
-                case RegexpToken.PLUS:
-                case RegexpToken.MINUS:
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.PLUS:
-                            Jj_consume_token(RegexpToken.PLUS);
-                            ret = MOD_REQ;
-                            break;
-                        case RegexpToken.MINUS:
-                            Jj_consume_token(RegexpToken.MINUS);
-                            ret = MOD_NOT;
-                            break;
-                        case RegexpToken.NOT:
-                            Jj_consume_token(RegexpToken.NOT);
-                            ret = MOD_NOT;
-                            break;
-                        default:
-                            jj_la1[2] = jj_gen;
-                            Jj_consume_token(-1);
-                            throw new ParseException();
-                    }
-                    break;
-                default:
-                    jj_la1[3] = jj_gen;
-                    break;
-            }
-            {
-                if (true) return ret;
-            }
-            throw new Exception("Missing return statement in function");
-        }
-
-        // This makes sure that there is no garbage after the query string
-        public override Query TopLevelQuery(string field)
-        {
-            Query q;
-            q = Query(field);
-            Jj_consume_token(0);
-            {
-                if (true) return q;
-            }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public Query Query(string field)
-        {
-            List<BooleanClause> clauses = new List<BooleanClause>();
-            Query q, firstQuery = null;
-            int conj, mods;
-            mods = Modifiers();
-            q = Clause(field);
-            AddClause(clauses, CONJ_NONE, mods, q);
-            if (mods == MOD_NONE)
-                firstQuery = q;
-            while (true)
-            {
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.AND:
-                    case RegexpToken.OR:
-                    case RegexpToken.NOT:
-                    case RegexpToken.PLUS:
-                    case RegexpToken.MINUS:
-                    case RegexpToken.BAREOPER:
-                    case RegexpToken.LPAREN:
-                    case RegexpToken.STAR:
-                    case RegexpToken.QUOTED:
-                    case RegexpToken.TERM:
-                    case RegexpToken.PREFIXTERM:
-                    case RegexpToken.WILDTERM:
-                    case RegexpToken.REGEXPTERM:
-                    case RegexpToken.RANGEIN_START:
-                    case RegexpToken.RANGEEX_START:
-                    case RegexpToken.NUMBER:
-                        break;
-                    default:
-                        jj_la1[4] = jj_gen;
-                        goto label_1;
-                }
-
-                conj = Conjunction();
-                mods = Modifiers();
-                q = Clause(field);
-                AddClause(clauses, conj, mods, q);
-            }
-
-        label_1:
-
-            if (clauses.Count == 1 && firstQuery != null)
-            {
-                if (true) return firstQuery;
-            }
-
-            return GetBooleanQuery(clauses);
-        }
-
-        public Query Clause(string field)
-        {
-            Query q;
-            Token fieldToken = null, boost = null;
-            if (Jj_2_1(2))
-            {
-                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                {
-                    case RegexpToken.TERM:
-                        fieldToken = Jj_consume_token(RegexpToken.TERM);
-                        Jj_consume_token(RegexpToken.COLON);
-                        field = DiscardEscapeChar(fieldToken.image);
-                        break;
-                    case RegexpToken.STAR:
-                        Jj_consume_token(RegexpToken.STAR);
-                        Jj_consume_token(RegexpToken.COLON);
-                        field = "*";
-                        break;
-                    default:
-                        jj_la1[5] = jj_gen;
-                        Jj_consume_token(-1);
-                        throw new ParseException();
-                }
-            }
-            else
-            {
-                ;
-            }
-            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-            {
-                case RegexpToken.BAREOPER:
-                case RegexpToken.STAR:
-                case RegexpToken.QUOTED:
-                case RegexpToken.TERM:
-                case RegexpToken.PREFIXTERM:
-                case RegexpToken.WILDTERM:
-                case RegexpToken.REGEXPTERM:
-                case RegexpToken.RANGEIN_START:
-                case RegexpToken.RANGEEX_START:
-                case RegexpToken.NUMBER:
-                    q = Term(field);
-                    break;
-                case RegexpToken.LPAREN:
-                    Jj_consume_token(RegexpToken.LPAREN);
-                    q = Query(field);
-                    Jj_consume_token(RegexpToken.RPAREN);
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.CARAT:
-                            Jj_consume_token(RegexpToken.CARAT);
-                            boost = Jj_consume_token(RegexpToken.NUMBER);
-                            break;
-                        default:
-                            jj_la1[6] = jj_gen;
-                            break;
-                    }
-                    break;
-                default:
-                    jj_la1[7] = jj_gen;
-                    Jj_consume_token(-1);
-                    throw new ParseException();
-            }
-            {
-                if (true) return HandleBoost(q, boost);
-            }
-            throw new Exception("Missing return statement in function");
-        }
-
-        public Query Term(String field)
-        {
-            Token term, boost = null, fuzzySlop = null, goop1, goop2;
-            bool prefix = false;
-            bool wildcard = false;
-            bool fuzzy = false;
-            bool regexp = false;
-            bool startInc = false;
-            bool endInc = false;
-            Query q;
-            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-            {
-                case RegexpToken.BAREOPER:
-                case RegexpToken.STAR:
-                case RegexpToken.TERM:
-                case RegexpToken.PREFIXTERM:
-                case RegexpToken.WILDTERM:
-                case RegexpToken.REGEXPTERM:
-                case RegexpToken.NUMBER:
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.TERM:
-                            term = Jj_consume_token(RegexpToken.TERM);
-                            break;
-                        case RegexpToken.STAR:
-                            term = Jj_consume_token(RegexpToken.STAR);
-                            wildcard = true;
-                            break;
-                        case RegexpToken.PREFIXTERM:
-                            term = Jj_consume_token(RegexpToken.PREFIXTERM);
-                            prefix = true;
-                            break;
-                        case RegexpToken.WILDTERM:
-                            term = Jj_consume_token(RegexpToken.WILDTERM);
-                            wildcard = true;
-                            break;
-                        case RegexpToken.REGEXPTERM:
-                            term = Jj_consume_token(RegexpToken.REGEXPTERM);
-                            regexp = true;
-                            break;
-                        case RegexpToken.NUMBER:
-                            term = Jj_consume_token(RegexpToken.NUMBER);
-                            break;
-                        case RegexpToken.BAREOPER:
-                            term = Jj_consume_token(RegexpToken.BAREOPER);
-                            term.image = term.image.Substring(0, 1);
-                            break;
-                        default:
-                            jj_la1[8] = jj_gen;
-                            Jj_consume_token(-1);
-                            throw new ParseException();
-                    }
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.FUZZY_SLOP:
-                            fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
-                            fuzzy = true;
-                            break;
-                        default:
-                            jj_la1[9] = jj_gen;
-                            break;
-                    }
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.CARAT:
-                            Jj_consume_token(RegexpToken.CARAT);
-                            boost = Jj_consume_token(RegexpToken.NUMBER);
-                            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                            {
-                                case RegexpToken.FUZZY_SLOP:
-                                    fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
-                                    fuzzy = true;
-                                    break;
-                                default:
-                                    jj_la1[10] = jj_gen;
-                                    break;
-                            }
-                            break;
-                        default:
-                            jj_la1[11] = jj_gen;
-                            break;
-                    }
-                    q = HandleBareTokenQuery(field, term, fuzzySlop, prefix, wildcard, fuzzy, regexp);
-                    break;
-                case RegexpToken.RANGEIN_START:
-                case RegexpToken.RANGEEX_START:
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.RANGEIN_START:
-                            Jj_consume_token(RegexpToken.RANGEIN_START);
-                            startInc = true;
-                            break;
-                        case RegexpToken.RANGEEX_START:
-                            Jj_consume_token(RegexpToken.RANGEEX_START);
-                            break;
-                        default:
-                            jj_la1[12] = jj_gen;
-                            Jj_consume_token(-1);
-                            throw new ParseException();
-                    }
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.RANGE_GOOP:
-                            goop1 = Jj_consume_token(RegexpToken.RANGE_GOOP);
-                            break;
-                        case RegexpToken.RANGE_QUOTED:
-                            goop1 = Jj_consume_token(RegexpToken.RANGE_QUOTED);
-                            break;
-                        default:
-                            jj_la1[13] = jj_gen;
-                            Jj_consume_token(-1);
-                            throw new ParseException();
-                    }
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.RANGE_TO:
-                            Jj_consume_token(RegexpToken.RANGE_TO);
-                            break;
-                        default:
-                            jj_la1[14] = jj_gen;
-                            break;
-                    }
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.RANGE_GOOP:
-                            goop2 = Jj_consume_token(RegexpToken.RANGE_GOOP);
-                            break;
-                        case RegexpToken.RANGE_QUOTED:
-                            goop2 = Jj_consume_token(RegexpToken.RANGE_QUOTED);
-                            break;
-                        default:
-                            jj_la1[15] = jj_gen;
-                            Jj_consume_token(-1);
-                            throw new ParseException();
-                    }
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.RANGEIN_END:
-                            Jj_consume_token(RegexpToken.RANGEIN_END);
-                            endInc = true;
-                            break;
-                        case RegexpToken.RANGEEX_END:
-                            Jj_consume_token(RegexpToken.RANGEEX_END);
-                            break;
-                        default:
-                            jj_la1[16] = jj_gen;
-                            Jj_consume_token(-1);
-                            throw new ParseException();
-                    }
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.CARAT:
-                            Jj_consume_token(RegexpToken.CARAT);
-                            boost = Jj_consume_token(RegexpToken.NUMBER);
-                            break;
-                        default:
-                            jj_la1[17] = jj_gen;
-                            break;
-                    }
-                    bool startOpen = false;
-                    bool endOpen = false;
-                    if (goop1.kind == RegexpToken.RANGE_QUOTED)
-                    {
-                        goop1.image = goop1.image.Substring(1, goop1.image.Length - 2);
-                    }
-                    else if ("*".Equals(goop1.image))
-                    {
-                        startOpen = true;
-                    }
-                    if (goop2.kind == RegexpToken.RANGE_QUOTED)
-                    {
-                        goop2.image = goop2.image.Substring(1, goop2.image.Length - 2);
-                    }
-                    else if ("*".Equals(goop2.image))
-                    {
-                        endOpen = true;
-                    }
-                    q = GetRangeQuery(field, startOpen ? null : DiscardEscapeChar(goop1.image), endOpen ? null : DiscardEscapeChar(goop2.image), startInc, endInc);
-                    break;
-                case RegexpToken.QUOTED:
-                    term = Jj_consume_token(RegexpToken.QUOTED);
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.FUZZY_SLOP:
-                            fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
-                            break;
-                        default:
-                            jj_la1[18] = jj_gen;
-                            break;
-                    }
-                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
-                    {
-                        case RegexpToken.CARAT:
-                            Jj_consume_token(RegexpToken.CARAT);
-                            boost = Jj_consume_token(RegexpToken.NUMBER);
-                            break;
-                        default:
-                            jj_la1[19] = jj_gen;
-                            break;
-                    }
-                    q = HandleQuotedTerm(field, term, fuzzySlop);
-                    break;
-                default:
-                    jj_la1[20] = jj_gen;
-                    Jj_consume_token(-1);
-                    throw new ParseException();
-            }
-            { if (true) return HandleBoost(q, boost); }
-            throw new Exception("Missing return statement in function");
-        }
-
-        private bool Jj_2_1(int xla)
-        {
-            jj_la = xla;
-            jj_lastpos = jj_scanpos = token;
-            try
-            {
-                return !Jj_3_1();
-            }
-            catch (LookaheadSuccess)
-            {
-                return true;
-            }
-            finally
-            {
-                Jj_save(0, xla);
-            }
-        }
-
-        private bool Jj_3R_2()
-        {
-            if (Jj_scan_token(RegexpToken.TERM)) return true;
-            if (Jj_scan_token(RegexpToken.COLON)) return true;
-            return false;
-        }
-
-        private bool Jj_3_1()
-        {
-            Token xsp;
-            xsp = jj_scanpos;
-            if (Jj_3R_2())
-            {
-                jj_scanpos = xsp;
-                if (Jj_3R_3()) return true;
-            }
-            return false;
-        }
-
-        private bool Jj_3R_3()
-        {
-            if (Jj_scan_token(RegexpToken.STAR)) return true;
-            if (Jj_scan_token(RegexpToken.COLON)) return true;
-            return false;
-        }
-
-        /* Generated Token Manager. */
-        public QueryParserTokenManager token_source;
-        /* Current token. */
-        public Token token;
-        /* Next token. */
-        public Token jj_nt;
-        private int jj_ntk;
-        private Token jj_scanpos, jj_lastpos;
-        private int jj_la;
-        private int jj_gen;
-        private int[] jj_la1 = new int[21];
-        private static uint[] jj_la1_0;
-        private static int[] jj_la1_1;
-
-        static QueryParser()
-        {
-            {
-                Jj_la1_init_0();
-                Jj_la1_init_1();
-            }
-        }
-
-        private static void Jj_la1_init_0()
-        {
-            jj_la1_0 = new uint[] 
-            { 
-                0x300, 0x300, 0x1c00, 0x1c00, 0xfda7f00, 0x120000, 0x40000, 0xfda6000, 0x9d22000, 0x200000, 
-                0x200000, 0x40000, 0x6000000, 0x80000000, 0x10000000, 0x80000000, 0x60000000, 0x40000, 
-                0x200000, 0x40000, 0xfda2000, 
-            };
-        }
-
-        private static void Jj_la1_init_1()
-        {
-            jj_la1_1 = new int[] 
-            { 
-                0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 
-                0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 
-            };
-        }
-
-        private JJCalls[] jj_2_rtns = new JJCalls[1];
-        private bool jj_rescan = false;
-        private int jj_gc = 0;
-
-        /// <summary>Constructor with user supplied CharStream. </summary>
-        protected internal QueryParser(ICharStream stream)
-        {
-            token_source = new QueryParserTokenManager(stream);
-            token = new Token();
-            jj_ntk = -1;
-            jj_gen = 0;
-            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
-            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
-        }
-
-        /// <summary>Reinitialise. </summary>
-        public override void ReInit(ICharStream stream)
-        {
-            token_source.ReInit(stream);
-            token = new Token();
-            jj_ntk = -1;
-            jj_gen = 0;
-            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
-            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
-        }
-
-        /// <summary>Constructor with generated Token Manager. </summary>
-        protected QueryParser(QueryParserTokenManager tm)
-        {
-            token_source = tm;
-            token = new Token();
-            jj_ntk = -1;
-            jj_gen = 0;
-            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
-            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
-        }
-
-        /// <summary>Reinitialise. </summary>
-        public void ReInit(QueryParserTokenManager tm)
-        {
-            token_source = tm;
-            token = new Token();
-            jj_ntk = -1;
-            jj_gen = 0;
-            for (int i = 0; i < 21; i++) jj_la1[i] = -1;
-            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
-        }
-
-        private Token Jj_consume_token(int kind)
-        {
-            Token oldToken;
-            if ((oldToken = token).next != null) token = token.next;
-            else token = token.next = token_source.GetNextToken();
-            jj_ntk = -1;
-            if (token.kind == kind)
-            {
-                jj_gen++;
-                if (++jj_gc > 100)
-                {
-                    jj_gc = 0;
-                    for (int i = 0; i < jj_2_rtns.Length; i++)
-                    {
-                        JJCalls c = jj_2_rtns[i];
-                        while (c != null)
-                        {
-                            if (c.gen < jj_gen) c.first = null;
-                            c = c.next;
-                        }
-                    }
-                }
-                return token;
-            }
-            token = oldToken;
-            jj_kind = kind;
-            throw GenerateParseException();
-        }
-
-        [Serializable]
-        private sealed class LookaheadSuccess : Exception
-        {
-        }
-
-        private LookaheadSuccess jj_ls = new LookaheadSuccess();
-        private bool Jj_scan_token(int kind)
-        {
-            if (jj_scanpos == jj_lastpos)
-            {
-                jj_la--;
-                if (jj_scanpos.next == null)
-                {
-                    jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
-                }
-                else
-                {
-                    jj_lastpos = jj_scanpos = jj_scanpos.next;
-                }
-            }
-            else
-            {
-                jj_scanpos = jj_scanpos.next;
-            }
-            if (jj_rescan)
-            {
-                int i = 0;
-                Token tok = token;
-                while (tok != null && tok != jj_scanpos)
-                {
-                    i++;
-                    tok = tok.next;
-                }
-                if (tok != null) Jj_add_error_token(kind, i);
-            }
-            if (jj_scanpos.kind != kind) return true;
-            if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
-            return false;
-        }
-
-        /// <summary>Get the next Token. </summary>
-        public Token GetNextToken()
-        {
-            if (token.next != null) token = token.next;
-            else token = token.next = token_source.GetNextToken();
-            jj_ntk = -1;
-            jj_gen++;
-            return token;
-        }
-
-        /// <summary>Get the specific Token. </summary>
-        public Token GetToken(int index)
-        {
-            Token t = token;
-            for (int i = 0; i < index; i++)
-            {
-                if (t.next != null) t = t.next;
-                else t = t.next = token_source.GetNextToken();
-            }
-            return t;
-        }
-
-        private int Jj_ntk()
-        {
-            if ((jj_nt = token.next) == null)
-                return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
-            else
-                return (jj_ntk = jj_nt.kind);
-        }
-
-        private List<int[]> jj_expentries = new List<int[]>();
-        private int[] jj_expentry;
-        private int jj_kind = -1;
-        private int[] jj_lasttokens = new int[100];
-        private int jj_endpos;
-
-        private void Jj_add_error_token(int kind, int pos)
-        {
-            if (pos >= 100) return;
-            if (pos == jj_endpos + 1)
-            {
-                jj_lasttokens[jj_endpos++] = kind;
-            }
-            else if (jj_endpos != 0)
-            {
-                jj_expentry = new int[jj_endpos];
-                for (int i = 0; i < jj_endpos; i++)
-                {
-                    jj_expentry[i] = jj_lasttokens[i];
-                }
-
-                foreach (var oldentry in jj_expentries)
-                {
-                    if (oldentry.Length == jj_expentry.Length)
-                    {
-                        for (int i = 0; i < jj_expentry.Length; i++)
-                        {
-                            if (oldentry[i] != jj_expentry[i])
-                            {
-                                continue;
-                            }
-                        }
-                        jj_expentries.Add(jj_expentry);
-                        break;
-                    }
-                }
-                if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
-            }
-        }
-
-        /// <summary>Generate ParseException. </summary>
-        public virtual ParseException GenerateParseException()
-        {
-            jj_expentries.Clear();
-            bool[] la1tokens = new bool[33];
-            if (jj_kind >= 0)
-            {
-                la1tokens[jj_kind] = true;
-                jj_kind = -1;
-            }
-            for (int i = 0; i < 21; i++)
-            {
-                if (jj_la1[i] == jj_gen)
-                {
-                    for (int j = 0; j < 32; j++)
-                    {
-                        if ((jj_la1_0[i] & (1 << j)) != 0)
-                        {
-                            la1tokens[j] = true;
-                        }
-                        if ((jj_la1_1[i] & (1 << j)) != 0)
-                        {
-                            la1tokens[32 + j] = true;
-                        }
-                    }
-                }
-            }
-            for (int i = 0; i < 33; i++)
-            {
-                if (la1tokens[i])
-                {
-                    jj_expentry = new int[1];
-                    jj_expentry[0] = i;
-                    jj_expentries.Add(jj_expentry);
-                }
-            }
-            jj_endpos = 0;
-            Jj_rescan_token();
-            Jj_add_error_token(0, 0);
-            int[][] exptokseq = new int[jj_expentries.Count][];
-            for (int i = 0; i < jj_expentries.Count; i++)
-            {
-                exptokseq[i] = jj_expentries[i];
-            }
-            return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
-        }
-
-        /// <summary>Enable tracing. </summary>
-        public void Enable_tracing()
-        {
-        }
-
-        /// <summary>Disable tracing. </summary>
-        public void Disable_tracing()
-        {
-        }
-
-        private void Jj_rescan_token()
-        {
-            jj_rescan = true;
-            for (int i = 0; i < 1; i++)
-            {
-                try
-                {
-                    JJCalls p = jj_2_rtns[i];
-                    do
-                    {
-                        if (p.gen > jj_gen)
-                        {
-                            jj_la = p.arg;
-                            jj_lastpos = jj_scanpos = p.first;
-                            switch (i)
-                            {
-                                case 0:
-                                    Jj_3_1();
-                                    break;
-                            }
-                        }
-                        p = p.next;
-                    } while (p != null);
-                }
-                catch (LookaheadSuccess)
-                {
-                }
-            }
-            jj_rescan = false;
-        }
-
-        private void Jj_save(int index, int xla)
-        {
-            JJCalls p = jj_2_rtns[index];
-            while (p.gen > jj_gen)
-            {
-                if (p.next == null)
-                {
-                    p = p.next = new JJCalls();
-                    break;
-                }
-                p = p.next;
-            }
-            p.gen = jj_gen + xla - jj_la;
-            p.first = token;
-            p.arg = xla;
-        }
-
-        internal sealed class JJCalls
-        {
-            internal int gen;
-            internal Token first;
-            internal int arg;
-            internal JJCalls next;
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
deleted file mode 100644
index 599110e..0000000
--- a/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
+++ /dev/null
@@ -1,1025 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Analysis.Tokenattributes;
-using Lucene.Net.Documents;
-using Lucene.Net.Index;
-using Lucene.Net.QueryParser.Classic;
-using Lucene.Net.QueryParser.Flexible.Standard;
-using Lucene.Net.Search;
-using Lucene.Net.Support;
-using Lucene.Net.Util;
-using System;
-using System.Collections.Generic;
-using System.Globalization;
-using System.IO;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public abstract partial class QueryParserBase : QueryBuilder, ICommonQueryParserConfiguration
-    {
-        /// <summary>
-        /// Do not catch this exception in your code, it means you are using methods that you should no longer use.
-        /// </summary>
-        public class MethodRemovedUseAnother : Exception {}
-
-        protected const int CONJ_NONE = 0;
-        protected const int CONJ_AND = 1;
-        protected const int CONJ_OR = 2;
-
-        protected const int MOD_NONE = 0;
-        protected const int MOD_NOT = 10;
-        protected const int MOD_REQ = 11;
-
-
-        // make it possible to call setDefaultOperator() without accessing
-        // the nested class:
-        
-        /// <summary>
-        /// Alternative form of QueryParser.Operator.AND
-        /// </summary>
-        public const Operator AND_OPERATOR = Operator.AND;
-        /// <summary>
-        /// Alternative form of QueryParser.Operator.OR
-        /// </summary>
-        public const Operator OR_OPERATOR = Operator.OR;
-
-        ///// <summary>
-        ///// The actual operator that parser uses to combine query terms
-        ///// </summary>
-        //Operator operator_Renamed = OR_OPERATOR;
-
-
-        // Note: In Java, this was part of the QueryParser class. 
-        // However, in .NET we need to put it here for the constants
-        // defined above.
-
-        /// <summary>
-        /// The default operator for parsing queries. 
-        /// Use <see cref="QueryParserBase.SetDefaultOperator"/> to change it.
-        /// </summary>
-        public enum Operator
-        {
-            OR,
-            AND
-        }
-
-        //bool lowercaseExpandedTerms = true;
-        //MultiTermQuery.RewriteMethod multiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
-        //bool allowLeadingWildcard = false;
-
-        // LUCENENET-423 - DateRange differences with Java and .NET
-        private bool _useJavaStyleDateRangeParsing = false;
-
-        protected string field;
-        //int phraseSlop = 0;
-        //float fuzzyMinSim = FuzzyQuery.DefaultMinSimilarity;
-        //int fuzzyPrefixLength = FuzzyQuery.DefaultPrefixLength;
-        //CultureInfo locale = CultureInfo.CurrentCulture;
-        //TimeZoneInfo timeZone = TimeZoneInfo.Local;
-
-        // TODO: Work out what the default date resolution SHOULD be (was null in Java, which isn't valid for an enum type)
-        
-        /// <summary>
-        /// the default date resolution
-        /// </summary>
-        DateTools.Resolution dateResolution = DateTools.Resolution.DAY;
-        /// <summary>
-        ///  maps field names to date resolutions
-        /// </summary>
-        IDictionary<string, DateTools.Resolution> fieldToDateResolution = null;
-
-        /// <summary>
-        /// Whether or not to analyze range terms when constructing RangeQuerys
-        /// (For example, analyzing terms into collation keys for locale-sensitive RangeQuery)
-        /// </summary>
-        //bool analyzeRangeTerms = false;
-
-        /// <summary>
-        /// So the generated QueryParser(CharStream) won't error out
-        /// </summary>
-        protected QueryParserBase()
-            : base(null)
-        {
-            // Set property defaults.
-            DefaultOperator = OR_OPERATOR;
-            LowercaseExpandedTerms = true;
-            MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
-            AllowLeadingWildcard = false;
-
-            PhraseSlop = 0;
-            FuzzyMinSim = FuzzyQuery.DefaultMinSimilarity;
-            FuzzyPrefixLength = FuzzyQuery.DefaultPrefixLength;
-            Locale = CultureInfo.CurrentCulture;
-            TimeZone = TimeZoneInfo.Local;
-            
-            AnalyzeRangeTerms = false;
-        }
-
-        /// <summary>
-        /// Initializes a query parser.  Called by the QueryParser constructor
-        /// </summary>
-        /// <param name="matchVersion">Lucene version to match.</param>
-        /// <param name="f">the default field for query terms.</param>
-        /// <param name="a">used to find terms in the query text.</param>
-        public void Init(LuceneVersion matchVersion, string f, Analyzer a)
-        {
-            Analyzer = a;
-            field = f;
-            if (matchVersion.OnOrAfter(LuceneVersion.LUCENE_31))
-            {
-                AutoGeneratePhraseQueries = false;
-            }
-            else
-            {
-                AutoGeneratePhraseQueries = true;
-            }
-        }
-
-        // the generated parser will create these in QueryParser
-        public abstract void ReInit(ICharStream stream);
-        public abstract Query TopLevelQuery(string field);
-
-        /// <summary>
-        /// Parses a query string, returning a <see cref="T:Query"/>.
-        /// </summary>
-        /// <remarks>
-        /// throws ParseException if the parsing fails
-        /// </remarks>
-        /// <param name="query">the query string to be parsed.</param>
-        /// <returns></returns>
-        public virtual Query Parse(string query)
-        {
-            ReInit(new FastCharStream(new StringReader(query)));
-            try
-            {
-                // TopLevelQuery is a Query followed by the end-of-input (EOF)
-                Query res = TopLevelQuery(field);
-                return res != null ? res : NewBooleanQuery(false);
-            }
-            catch (ParseException tme)
-            {
-                // rethrow to include the original query:
-                throw new ParseException("Cannot parse '" + query + "': " + tme.Message, tme);
-            }
-            catch (TokenMgrError tme)
-            {
-                throw new ParseException("Cannot parse '" + query + "': " + tme.Message, tme);
-            }
-            catch (BooleanQuery.TooManyClauses tmc)
-            {
-                throw new ParseException("Cannot parse '" + query + "': too many boolean clauses", tmc);
-            }
-        }
-
-        /// <summary>
-        /// Returns the default field.
-        /// </summary>
-        public string Field
-        {
-            get { return field; }
-        }
-
-        /// <summary>
-        /// Set to true if phrase queries will be automatically generated
-        /// when the analyzer returns more than one term from whitespace
-        /// delimited text.
-        /// NOTE: this behavior may not be suitable for all languages.
-        /// <p>
-        /// Set to false if phrase queries should only be generated when
-        /// surrounded by double quotes.
-        /// </summary>
-        public bool AutoGeneratePhraseQueries { get; set; }
-
-        /// <summary>
-        /// Get or Set the minimum similarity for fuzzy queries.
-        /// Default is 2f.
-        /// </summary>
-        public float FuzzyMinSim { get; set; }
-
-        /// <summary>
-        /// Get or Set the prefix length for fuzzy queries. 
-        /// Default is 0.
-        /// </summary>
-        public int FuzzyPrefixLength { get; set; }
-
-        /// <summary>
-        /// Gets or Sets the default slop for phrases. 
-        /// If zero, then exact phrase matches are required. 
-        /// Default value is zero.
-        /// </summary>
-        public int PhraseSlop { get; set; }
-
-        /// <summary>
-        /// Set to <code>true</code> to allow leading wildcard characters.
-        /// <p>
-        /// When set, <code>*</code> or <code>?</code> are allowed as
-        /// the first character of a PrefixQuery and WildcardQuery.
-        /// Note that this can produce very slow
-        /// queries on big indexes.
-        /// <p>
-        /// Default: false.
-        /// </summary>
-        public bool AllowLeadingWildcard { get; set; }
-
-        /// <summary>
-        /// Gets or Sets the boolean operator of the QueryParser.
-        /// In default mode (<code>OR_OPERATOR</code>) terms without any modifiers
-        /// are considered optional: for example <code>capital of Hungary</code> is equal to
-        /// <code>capital OR of OR Hungary</code>.<br/>
-        /// In <code>AND_OPERATOR</code> mode terms are considered to be in conjunction: the
-        /// above mentioned query is parsed as <code>capital AND of AND Hungary
-        /// </summary>
-        public Operator DefaultOperator { get; set; }
-
-        /// <summary>
-        /// Whether terms of wildcard, prefix, fuzzy and range queries are to be automatically
-        //  lower-cased or not.  Default is <code>true</code>.
-        /// </summary>
-        public bool LowercaseExpandedTerms { get; set; }
-
-        /// <summary>
-        /// By default QueryParser uses <see cref="MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT"/>
-        /// when creating a <see cref="PrefixQuery"/>, <see cref="WildcardQuery"/> or <see cref="TermRangeQuery"/>. This implementation is generally preferable because it
-        /// a) Runs faster b) Does not have the scarcity of terms unduly influence score
-        /// c) avoids any <see cref="TooManyClauses"/> exception.
-        /// However, if your application really needs to use the
-        /// old-fashioned <see cref="BooleanQuery"/> expansion rewriting and the above
-        /// points are not relevant then use this to change
-        /// the rewrite method.
-        /// </summary>
-        public MultiTermQuery.RewriteMethod MultiTermRewriteMethod { get; set; }
-
-        /// <summary>
-        /// Get or Set locale used by date range parsing, lowercasing, and other
-        /// locale-sensitive operations.
-        /// </summary>
-        public CultureInfo Locale { get; set; }
-
-        public TimeZoneInfo TimeZone { get; set; }
-
-        /// <summary>
-        /// Gets or Sets the default date resolution used by RangeQueries for fields for which no
-        /// specific date resolutions has been set. Field specific resolutions can be set
-        /// with <see cref="SetDateResolution(string,DateTools.Resolution)"/>.
-        /// </summary>
-        public void SetDateResolution(DateTools.Resolution dateResolution)
-        {
-            this.dateResolution = dateResolution;
-        }
-
-        /// <summary>
-        /// Sets the date resolution used by RangeQueries for a specific field.
-        /// </summary>
-        /// <param name="fieldName">field for which the date resolution is to be set</param>
-        /// <param name="dateResolution">date resolution to set</param>
-        public void SetDateResolution(string fieldName, DateTools.Resolution dateResolution)
-        {
-            if (string.IsNullOrEmpty(fieldName))
-            {
-                throw new ArgumentNullException("fieldName cannot be null or empty string.");
-            }
-
-            if (fieldToDateResolution == null)
-            {
-                // lazily initialize Dictionary
-                fieldToDateResolution = new Dictionary<string, DateTools.Resolution>();
-            }
-
-            fieldToDateResolution[fieldName] = dateResolution;
-        }
-
-        /// <summary>
-        /// Returns the date resolution that is used by RangeQueries for the given field.
-        /// Returns null, if no default or field specific date resolution has been set 
-        /// for the given field.
-        /// </summary>
-        /// <param name="fieldName"></param>
-        /// <returns></returns>
-        public DateTools.Resolution GetDateResolution(string fieldName)
-        {
-            if (string.IsNullOrEmpty(fieldName))
-            {
-                throw new ArgumentNullException("fieldName cannot be null or empty string.");
-            }
-
-            if (fieldToDateResolution == null)
-            {
-                // no field specific date resolutions set; return default date resolution instead
-                return this.dateResolution;
-            }
-
-            if (!fieldToDateResolution.ContainsKey(fieldName))
-            {
-                // no date resolutions set for the given field; return default date resolution instead
-                return this.dateResolution;
-            }
-
-            return fieldToDateResolution[fieldName];
-        }
-
-        /// <summary>
-        /// Get or Set whether or not to analyze range terms when constructing <see cref="TermRangeQuery"/>s.
-        /// For example, setting this to true can enable analyzing terms into 
-        /// collation keys for locale-sensitive <see cref="TermRangeQuery"/>.
-        /// </summary>
-        public bool AnalyzeRangeTerms { get; set; }
-
-        protected internal virtual void AddClause(IList<BooleanClause> clauses, int conj, int mods, Query q)
-        {
-            bool required, prohibited;
-
-            // If this term is introduced by AND, make the preceding term required,
-            // unless it's already prohibited
-            if (clauses.Count > 0 && conj == CONJ_AND)
-            {
-                BooleanClause c = clauses[clauses.Count - 1];
-                if (!c.Prohibited)
-                    c.Occur_ = BooleanClause.Occur.MUST;
-            }
-
-            if (clauses.Count > 0 && DefaultOperator == AND_OPERATOR && conj == CONJ_OR)
-            {
-                // If this term is introduced by OR, make the preceding term optional,
-                // unless it's prohibited (that means we leave -a OR b but +a OR b-->a OR b)
-                // notice if the input is a OR b, first term is parsed as required; without
-                // this modification a OR b would parsed as +a OR b
-                BooleanClause c = clauses[clauses.Count - 1];
-                if (!c.Prohibited)
-                    c.Occur_ = BooleanClause.Occur.SHOULD;
-            }
-
-            // We might have been passed a null query; the term might have been
-            // filtered away by the analyzer.
-            if (q == null)
-                return;
-
-            if (DefaultOperator == OR_OPERATOR)
-            {
-                // We set REQUIRED if we're introduced by AND or +; PROHIBITED if
-                // introduced by NOT or -; make sure not to set both.
-                prohibited = (mods == MOD_NOT);
-                required = (mods == MOD_REQ);
-                if (conj == CONJ_AND && !prohibited)
-                {
-                    required = true;
-                }
-            }
-            else
-            {
-                // We set PROHIBITED if we're introduced by NOT or -; We set REQUIRED
-                // if not PROHIBITED and not introduced by OR
-                prohibited = (mods == MOD_NOT);
-                required = (!prohibited && conj != CONJ_OR);
-            }
-            if (required && !prohibited)
-                clauses.Add(NewBooleanClause(q, BooleanClause.Occur.MUST));
-            else if (!required && !prohibited)
-                clauses.Add(NewBooleanClause(q, BooleanClause.Occur.SHOULD));
-            else if (!required && prohibited)
-                clauses.Add(NewBooleanClause(q, BooleanClause.Occur.MUST_NOT));
-            else
-                throw new Exception("Clause cannot be both required and prohibited");
-        }
-
-        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
-        protected internal virtual Query GetFieldQuery(string field, string queryText, bool quoted)
-        {
-            return NewFieldQuery(Analyzer, field, queryText, quoted);
-        }
-
-        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
-        protected internal virtual Query NewFieldQuery(Analyzer analyzer, string field, string queryText, bool quoted)
-        {
-            BooleanClause.Occur occur = DefaultOperator == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD;
-            return CreateFieldQuery(analyzer, occur, field, queryText, quoted || AutoGeneratePhraseQueries, PhraseSlop);
-        }
-
-        /// <summary>
-        /// Base implementation delegates to <see cref="GetFieldQuery(string,string,bool)"/>.
-        /// This method may be overridden, for example, to return
-        /// a SpanNearQuery instead of a PhraseQuery.
-        /// </summary>
-        /// <param name="field"></param>
-        /// <param name="queryText"></param>
-        /// <param name="slop"></param>
-        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
-        /// <returns></returns>
-        protected internal virtual Query GetFieldQuery(String field, String queryText, int slop)
-        {
-            Query query = GetFieldQuery(field, queryText, true);
-
-            if (query is PhraseQuery)
-            {
-                ((PhraseQuery)query).Slop = slop;
-            }
-            if (query is MultiPhraseQuery)
-            {
-                ((MultiPhraseQuery)query).Slop = slop;
-            }
-
-            return query;
-        }
-
-        protected internal virtual Query GetRangeQuery(string field,
-                              string part1,
-                              string part2,
-                              bool startInclusive,
-                              bool endInclusive)
-        {
-            if (LowercaseExpandedTerms)
-            {
-                part1 = part1 == null ? null : part1.ToLower(Locale);
-                part2 = part2 == null ? null : part2.ToLower(Locale);
-            }
-
-            try
-            {
-                DateTime d1, d2;
-                if (_useJavaStyleDateRangeParsing)
-                {
-                    // TODO: This doesn't emulate java perfectly.
-                    // Java allows parsing of the string up to the end of the pattern
-                    // and then ignores everything else.  .NET will throw an exception, 
-                    // so this will fail in those cases, though the code below is clear
-                    // that users can only specify the date, not the time.
-                    var shortFormat = Locale.DateTimeFormat.ShortDatePattern;
-                    d1 = DateTime.ParseExact(part1, shortFormat, Locale);
-                    d2 = DateTime.ParseExact(part2, shortFormat, Locale);
-                }
-                else
-                {
-                    d1 = DateTime.Parse(part1, Locale);
-                    d2 = DateTime.Parse(part2, Locale);
-                }
-
-                if (endInclusive)
-                {
-                    // The user can only specify the date, not the time, so make sure
-                    // the time is set to the latest possible time of that date to really
-                    // include all documents:
-
-                    // TODO: Try to work out if the Time Zone is pertinent here or
-                    // whether it should just be removed from the API entirely.
-                    // In Java:
-                    // Calendar cal = Calendar.getInstance(timeZone, locale);
-
-                    var cal = Locale.Calendar;
-                    d2 = cal.AddHours(d2, 23);
-                    d2 = cal.AddMinutes(d2, 59);
-                    d2 = cal.AddSeconds(d2, 59);
-                    d2 = cal.AddMilliseconds(d2, 999);
-                }
-                DateTools.Resolution resolution = GetDateResolution(field);
-
-                part1 = DateTools.DateToString(d1, resolution);
-                part2 = DateTools.DateToString(d2, resolution);
-
-            }
-            catch (Exception)
-            {
-            }
-
-            return NewRangeQuery(field, part1, part2, startInclusive, endInclusive);
-        }
-
-        /// <summary>Builds a new BooleanClause instance</summary>
-        /// <param name="q">sub query</param>
-        /// <param name="occur">how this clause should occur when matching documents</param>
-        /// <returns> new BooleanClause instance</returns>
-        protected internal virtual BooleanClause NewBooleanClause(Query q, BooleanClause.Occur occur)
-        {
-            return new BooleanClause(q, occur);
-        }
-
-        /// <summary>
-        /// Builds a new PrefixQuery instance
-        /// </summary>
-        /// <param name="prefix">Prefix term</param>
-        /// <returns>new PrefixQuery instance</returns>
-        protected internal virtual Query NewPrefixQuery(Term prefix)
-        {
-            PrefixQuery query = new PrefixQuery(prefix);
-            query.SetRewriteMethod(MultiTermRewriteMethod);
-            return query;
-        }
-
-        /// <summary>
-        /// Builds a new RegexpQuery instance
-        /// </summary>
-        /// <param name="regexp">Regexp term</param>
-        /// <returns>new RegexpQuery instance</returns>
-        protected internal virtual Query NewRegexpQuery(Term regexp)
-        {
-            RegexpQuery query = new RegexpQuery(regexp);
-            query.SetRewriteMethod(MultiTermRewriteMethod);
-            return query;
-        }
-
-        /// <summary>
-        /// Builds a new FuzzyQuery instance
-        /// </summary>
-        /// <param name="term">Term</param>
-        /// <param name="minimumSimilarity">minimum similarity</param>
-        /// <param name="prefixLength">prefix length</param>
-        /// <returns>new FuzzyQuery Instance</returns>
-        protected internal virtual Query NewFuzzyQuery(Term term, float minimumSimilarity, int prefixLength)
-        {
-            // FuzzyQuery doesn't yet allow constant score rewrite
-            string text = term.Text();
-            int numEdits = FuzzyQuery.FloatToEdits(minimumSimilarity,
-                Character.CodePointCount(text,0, text.Length));
-            return new FuzzyQuery(term, numEdits, prefixLength);
-        }
-
-        // LUCENE TODO: Should this be protected instead?
-        private BytesRef AnalyzeMultitermTerm(string field, string part)
-        {
-            return AnalyzeMultitermTerm(field, part, Analyzer);
-        }
-
-        protected internal virtual BytesRef AnalyzeMultitermTerm(string field, string part, Analyzer analyzerIn)
-        {
-            if (analyzerIn == null) analyzerIn = Analyzer;
-
-            TokenStream source = null;
-            try
-            {
-                source = analyzerIn.TokenStream(field, part);
-                source.Reset();
-
-                ITermToBytesRefAttribute termAtt = source.GetAttribute<ITermToBytesRefAttribute>();
-                BytesRef bytes = termAtt.BytesRef;
-
-                if (!source.IncrementToken())
-                    throw new ArgumentException("analyzer returned no terms for multiTerm term: " + part);
-                termAtt.FillBytesRef();
-                if (source.IncrementToken())
-                    throw new ArgumentException("analyzer returned too many terms for multiTerm term: " + part);
-                source.End();
-                return BytesRef.DeepCopyOf(bytes);
-            }
-            catch (IOException e)
-            {
-                throw new Exception("Error analyzing multiTerm term: " + part, e);
-            }
-            finally
-            {
-                IOUtils.CloseWhileHandlingException(source);
-            }
-        }
-
-        /// <summary>
-        /// Builds a new {@link TermRangeQuery} instance
-        /// </summary>
-        /// <param name="field">Field</param>
-        /// <param name="part1">min</param>
-        /// <param name="part2">max</param>
-        /// <param name="startInclusive">true if the start of the range is inclusive</param>
-        /// <param name="endInclusive">true if the end of the range is inclusive</param>
-        /// <returns>new <see cref="T:TermRangeQuery"/> instance</returns>
-        protected internal virtual Query NewRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
-        {
-            BytesRef start;
-            BytesRef end;
-
-            if (part1 == null)
-            {
-                start = null;
-            }
-            else
-            {
-                start = AnalyzeRangeTerms ? AnalyzeMultitermTerm(field, part1) : new BytesRef(part1);
-            }
-
-            if (part2 == null)
-            {
-                end = null;
-            }
-            else
-            {
-                end = AnalyzeRangeTerms ? AnalyzeMultitermTerm(field, part2) : new BytesRef(part2);
-            }
-
-            TermRangeQuery query = new TermRangeQuery(field, start, end, startInclusive, endInclusive);
-
-            query.SetRewriteMethod(MultiTermRewriteMethod);
-            return query;
-        }
-
-        /// <summary>
-        /// Builds a new MatchAllDocsQuery instance
-        /// </summary>
-        /// <returns>new MatchAllDocsQuery instance</returns>
-        protected internal virtual Query NewMatchAllDocsQuery()
-        {
-            return new MatchAllDocsQuery();
-        }
-
-        /// <summary>
-        /// Builds a new WildcardQuery instance
-        /// </summary>
-        /// <param name="t">wildcard term</param>
-        /// <returns>new WildcardQuery instance</returns>
-        protected internal virtual Query NewWildcardQuery(Term t)
-        {
-            WildcardQuery query = new WildcardQuery(t);
-            query.SetRewriteMethod(MultiTermRewriteMethod);
-            return query;
-        }
-
-        /// <summary>
-        /// Factory method for generating query, given a set of clauses.
-        /// By default creates a boolean query composed of clauses passed in.
-        ///
-        /// Can be overridden by extending classes, to modify query being
-        /// returned.
-        /// </summary>
-        /// <param name="clauses">List that contains {@link org.apache.lucene.search.BooleanClause} instances 
-        /// to join.</param>
-        /// <exception cref="T:ParseException">throw in overridden method to disallow</exception>
-        /// <returns>Resulting <see cref="T:Query"/> object.</returns>
-        protected internal virtual Query GetBooleanQuery(IList<BooleanClause> clauses)
-        {
-            return GetBooleanQuery(clauses, false);
-        }
-
-        /// <summary>
-        /// Factory method for generating query, given a set of clauses.
-        /// By default creates a boolean query composed of clauses passed in.
-        /// 
-        /// Can be overridden by extending classes, to modify query being
-        /// returned.
-        /// </summary>
-        /// <param name="clauses">List that contains <see cref="T:BooleanClause"/> instances
-        /// to join.</param>
-        /// <param name="disableCoord">true if coord scoring should be disabled.</param>
-        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
-        /// <returns>Resulting <see cref="T:Query"/> object.</returns>
-        protected internal virtual Query GetBooleanQuery(IList<BooleanClause> clauses, bool disableCoord)
-        {
-            if (clauses.Count == 0)
-            {
-                return null; // all clause words were filtered away by the analyzer.
-            }
-            BooleanQuery query = NewBooleanQuery(disableCoord);
-            foreach (BooleanClause clause in clauses)
-            {
-                query.Add(clause);
-            }
-            return query;
-        }
-
-        /// <summary>
-        /// Factory method for generating a query. Called when parser
-        /// parses an input term token that contains one or more wildcard
-        /// characters (? and *), but is not a prefix term token (one
-        /// that has just a single * character at the end)
-        /// <p>
-        /// Depending on settings, prefix term may be lower-cased
-        /// automatically. It will not go through the default Analyzer,
-        /// however, since normal Analyzers are unlikely to work properly
-        /// with wildcard templates.
-        /// <p>
-        /// Can be overridden by extending classes, to provide custom handling for
-        /// wildcard queries, which may be necessary due to missing analyzer calls.
-        /// </summary>
-        /// <param name="field">Name of the field query will use.</param>
-        /// <param name="termStr">Term token that contains one or more wild card
-        /// characters (? or *), but is not simple prefix term</param>
-        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
-        /// <returns>Resulting <see cref="T:Query"/> built for the term</returns>
-        protected internal virtual Query GetWildcardQuery(string field, string termStr)
-        {
-            if ("*".Equals(field))
-            {
-                if ("*".Equals(termStr)) return NewMatchAllDocsQuery();
-            }
-            if (!AllowLeadingWildcard && (termStr.StartsWith("*") || termStr.StartsWith("?")))
-                throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery");
-            if (LowercaseExpandedTerms)
-            {
-                termStr = termStr.ToLower(Locale);
-            }
-            Term t = new Term(field, termStr);
-            return NewWildcardQuery(t);
-        }
-
-        /// <summary>
-        /// Factory method for generating a query. Called when parser
-        /// parses an input term token that contains a regular expression
-        /// query.
-        /// <p>
-        /// Depending on settings, pattern term may be lower-cased
-        /// automatically. It will not go through the default Analyzer,
-        /// however, since normal Analyzers are unlikely to work properly
-        /// with regular expression templates.
-        /// <p>
-        /// Can be overridden by extending classes, to provide custom handling for
-        /// regular expression queries, which may be necessary due to missing analyzer
-        /// calls.
-        /// </summary>
-        /// <param name="field">Name of the field query will use.</param>
-        /// <param name="termStr">Term token that contains a regular expression</param>
-        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
-        /// <returns>Resulting <see cref="T:Query"/> built for the term</returns>
-        protected internal virtual Query GetRegexpQuery(string field, string termStr)
-        {
-            if (LowercaseExpandedTerms)
-            {
-                termStr = termStr.ToLower(Locale);
-            }
-            Term t = new Term(field, termStr);
-            return NewRegexpQuery(t);
-        }
-
-        /// <summary>
-        /// Factory method for generating a query (similar to
-        /// <see cref="M:GetWildcardQuery"/>). Called when parser parses an input term
-        /// token that uses prefix notation; that is, contains a single '*' wildcard
-        /// character as its last character. Since this is a special case
-        /// of generic wildcard term, and such a query can be optimized easily,
-        /// this usually results in a different query object.
-        /// <p>
-        /// Depending on settings, a prefix term may be lower-cased
-        /// automatically. It will not go through the default Analyzer,
-        /// however, since normal Analyzers are unlikely to work properly
-        /// with wildcard templates.
-        /// <p>
-        /// Can be overridden by extending classes, to provide custom handling for
-        /// wild card queries, which may be necessary due to missing analyzer calls.
-        /// </summary>
-        /// <param name="field">Name of the field query will use.</param>
-        /// <param name="termStr">Term token to use for building term for the query</param>
-        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
-        /// <returns>Resulting <see cref="T:Query"/> built for the term</returns>
-        protected internal virtual Query GetPrefixQuery(string field, string termStr)
-        {
-            if (!AllowLeadingWildcard && termStr.StartsWith("*"))
-                throw new ParseException("'*' not allowed as first character in PrefixQuery");
-            if (LowercaseExpandedTerms)
-            {
-                termStr = termStr.ToLower(Locale);
-            }
-            Term t = new Term(field, termStr);
-            return NewPrefixQuery(t);
-        }
-
-        /// <summary>
-        /// Factory method for generating a query (similar to
-        /// <see cref="M:GetWildcardQuery"/>). Called when parser parses
-        /// an input term token that has the fuzzy suffix (~) appended.
-        /// </summary>
-        /// <param name="field">Name of the field query will use.</param>
-        /// <param name="termStr">Term token to use for building term for the query</param>
-        /// <param name="minSimilarity">minimum similarity</param>
-        /// <exception cref="ParseException">throw in overridden method to disallow</exception>
-        /// <returns>Resulting <see cref="T:Query"/> built for the term</returns>
-        protected internal virtual Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
-        {
-            if (LowercaseExpandedTerms)
-            {
-                termStr = termStr.ToLower(Locale);
-            }
-            Term t = new Term(field, termStr);
-            return NewFuzzyQuery(t, minSimilarity, FuzzyPrefixLength);
-        }
-
-        // extracted from the .jj grammar
-        protected internal virtual Query HandleBareTokenQuery(string qfield, Token term, Token fuzzySlop, bool prefix, bool wildcard, bool fuzzy, bool regexp)
-        {
-            Query q;
-
-            string termImage = DiscardEscapeChar(term.image);
-            if (wildcard)
-            {
-                q = GetWildcardQuery(qfield, term.image);
-            }
-            else if (prefix)
-            {
-                q = GetPrefixQuery(qfield, DiscardEscapeChar(term.image.Substring(0, term.image.Length - 1)));
-            }
-            else if (regexp)
-            {
-                q = GetRegexpQuery(qfield, term.image.Substring(1, term.image.Length - 2));
-            }
-            else if (fuzzy)
-            {
-                q = HandleBareFuzzy(qfield, fuzzySlop, termImage);
-            }
-            else
-            {
-                q = GetFieldQuery(qfield, termImage, false);
-            }
-            return q;
-        }
-
-        protected internal virtual Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
-        {
-            Query q;
-            float fms = FuzzyMinSim;
-            try
-            {
-                fms = float.Parse(fuzzySlop.image.Substring(1), Locale);
-            }
-            catch (Exception ignored) { }
-            if (fms < 0.0f)
-            {
-                throw new ParseException("Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !");
-            }
-            else if (fms >= 1.0f && fms != (int)fms)
-            {
-                throw new ParseException("Fractional edit distances are not allowed!");
-            }
-            q = GetFuzzyQuery(qfield, termImage, fms);
-            return q;
-        }
-
-        // extracted from the .jj grammar
-        protected internal virtual Query HandleQuotedTerm(string qfield, Token term, Token fuzzySlop)
-        {
-            int s = PhraseSlop;  // default
-            if (fuzzySlop != null)
-            {
-                try
-                {
-                    s = (int)float.Parse(fuzzySlop.image.Substring(1), Locale);
-                }
-                catch (Exception ignored) { }
-            }
-            return GetFieldQuery(qfield, DiscardEscapeChar(term.image.Substring(1, term.image.Length - 2)), s);
-        }
-
-        // extracted from the .jj grammar
-        protected internal virtual Query HandleBoost(Query q, Token boost)
-        {
-            if (boost != null)
-            {
-                float f = (float)1.0;
-                try
-                {
-                    f = float.Parse(boost.image, Locale);
-                }
-                catch (Exception ignored)
-                {
-                    /* Should this be handled somehow? (defaults to "no boost", if
-                     * boost number is invalid)
-                     */
-                }
-
-                // avoid boosting null queries, such as those caused by stop words
-                if (q != null)
-                {
-                    q.Boost = f;
-                }
-            }
-            return q;
-        }
-
-        /// <summary>
-        /// Returns a String where the escape char has been
-        /// removed, or kept only once if there was a double escape.
-        /// 
-        /// Supports escaped unicode characters, e. g. translates 
-        /// <code>\\u0041</code> to <code>A</code>.
-        /// </summary>
-        /// <param name="input"></param>
-        /// <returns></returns>
-        protected internal virtual string DiscardEscapeChar(string input)
-        {
-            // Create char array to hold unescaped char sequence
-            char[] output = new char[input.Length];
-
-            // The length of the output can be less than the input
-            // due to discarded escape chars. This variable holds
-            // the actual length of the output
-            int length = 0;
-
-            // We remember whether the last processed character was
-            // an escape character
-            bool lastCharWasEscapeChar = false;
-
-            // The multiplier the current unicode digit must be multiplied with.
-            // E. g. the first digit must be multiplied with 16^3, the second with 16^2...
-            int codePointMultiplier = 0;
-
-            // Used to calculate the codepoint of the escaped unicode character
-            int codePoint = 0;
-
-            for (int i = 0; i < input.Length; i++)
-            {
-                char curChar = input[i];
-                if (codePointMultiplier > 0)
-                {
-                    codePoint += HexToInt(curChar) * codePointMultiplier;
-                    codePointMultiplier = Number.URShift(codePointMultiplier, 4);
-                    if (codePointMultiplier == 0)
-                    {
-                        output[length++] = (char)codePoint;
-                        codePoint = 0;
-                    }
-                }
-                else if (lastCharWasEscapeChar)
-                {
-                    if (curChar == 'u')
-                    {
-                        // found an escaped unicode character
-                        codePointMultiplier = 16 * 16 * 16;
-                    }
-                    else
-                    {
-                        // this character was escaped
-                        output[length] = curChar;
-                        length++;
-                    }
-                    lastCharWasEscapeChar = false;
-                }
-                else
-                {
-                    if (curChar == '\\')
-                    {
-                        lastCharWasEscapeChar = true;
-                    }
-                    else
-                    {
-                        output[length] = curChar;
-                        length++;
-                    }
-                }
-            }
-
-            if (codePointMultiplier > 0)
-            {
-                throw new ParseException("Truncated unicode escape sequence.");
-            }
-
-            if (lastCharWasEscapeChar)
-            {
-                throw new ParseException("Term can not end with escape character.");
-            }
-
-            return new String(output, 0, length);
-        }
-
-        /// <summary>
-        /// Returns the numeric value of the hexadecimal character
-        /// </summary>
-        private static int HexToInt(char c)
-        {
-            if ('0' <= c && c <= '9')
-            {
-                return c - '0';
-            }
-            else if ('a' <= c && c <= 'f')
-            {
-                return c - 'a' + 10;
-            }
-            else if ('A' <= c && c <= 'F')
-            {
-                return c - 'A' + 10;
-            }
-            else
-            {
-                throw new ParseException("Non-hex character in Unicode escape sequence: " + c);
-            }
-        }
-
-        /// <summary>
-        /// Returns a String where those characters that QueryParser
-        /// expects to be escaped are escaped by a preceding <code>\</code>.
-        /// </summary>
-        public static string Escape(string s)
-        {
-            StringBuilder sb = new StringBuilder();
-            for (int i = 0; i < s.Length; i++)
-            {
-                char c = s[i];
-                // These characters are part of the query syntax and must be escaped
-                if (c == '\\' || c == '+' || c == '-' || c == '!' || c == '(' || c == ')' || c == ':'
-                  || c == '^' || c == '[' || c == ']' || c == '\"' || c == '{' || c == '}' || c == '~'
-                  || c == '*' || c == '?' || c == '|' || c == '&' || c == '/')
-                {
-                    sb.Append('\\');
-                }
-                sb.Append(c);
-            }
-            return sb.ToString();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs b/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
deleted file mode 100644
index dcfa193..0000000
--- a/Lucene.Net.QueryParser/Classic/QueryParserConstants.cs
+++ /dev/null
@@ -1,224 +0,0 @@
-using System;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public static class RegexpToken
-    {
-        /// <summary>End of File. </summary>
-        public const int EOF = 0;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _NUM_CHAR = 1;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _ESCAPED_CHAR = 2;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _TERM_START_CHAR = 3;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _TERM_CHAR = 4;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _WHITESPACE = 5;
-        /// <summary>RegularExpression Id. </summary>
-        public const int _QUOTED_CHAR = 6;
-        /// <summary>RegularExpression Id. </summary>
-        public const int AND = 8;
-        /// <summary>RegularExpression Id. </summary>
-        public const int OR = 9;
-        /// <summary>RegularExpression Id. </summary>
-        public const int NOT = 10;
-        /// <summary>RegularExpression Id. </summary>
-        public const int PLUS = 11;
-        /// <summary>RegularExpression Id. </summary>
-        public const int MINUS = 12;
-        /// <summary>RegularExpression Id. </summary>
-        public const int BAREOPER = 13;
-        /// <summary>RegularExpression Id. </summary>
-        public const int LPAREN = 14;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RPAREN = 15;
-        /// <summary>RegularExpression Id. </summary>
-        public const int COLON = 16;
-        /// <summary>RegularExpression Id. </summary>
-        public const int STAR = 17;
-        /// <summary>RegularExpression Id. </summary>
-        public const int CARAT = 18;
-        /// <summary>RegularExpression Id. </summary>
-        public const int QUOTED = 19;
-        /// <summary>RegularExpression Id. </summary>
-        public const int TERM = 20;
-        /// <summary>RegularExpression Id. </summary>
-        public const int FUZZY_SLOP = 21;
-        /// <summary>RegularExpression Id. </summary>
-        public const int PREFIXTERM = 22;
-        /// <summary>RegularExpression Id. </summary>
-        public const int WILDTERM = 23;
-        /// <summary>RegularExpression Id. </summary>
-        public const int REGEXPTERM = 24;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RANGEIN_START = 25;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RANGEEX_START = 26;
-        /// <summary>RegularExpression Id. </summary>
-        public const int NUMBER = 27;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RANGE_TO = 28;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RANGEIN_END = 29;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RANGEEX_END = 30;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RANGE_QUOTED = 31;
-        /// <summary>RegularExpression Id. </summary>
-        public const int RANGE_GOOP = 32;
-    }
-
-    public static class LexicalToken
-    {
-        /// <summary>Lexical state.</summary>
-        public const int Boost = 0;
-        /// <summary>Lexical state.</summary>
-        public const int Range = 1;
-        /// <summary>Lexical state.</summary>
-        public const int DEFAULT = 2;
-    }
-
-    // NOTE: In Java, this was an interface. However, in 
-    // .NET we cannot define constants in an interface.
-    // So, instead we are making it a static class so it 
-    // can be shared between classes with different base classes.
-
-    // public interface QueryParserConstants
-
-	/// <summary> Token literal values and constants.
-	/// Generated by org.javacc.parser.OtherFilesGen#start()
-	/// </summary>
-    public static class QueryParserConstants
-    {
-        ///// <summary>End of File. </summary>
-        //public const int EndOfFileToken = 0;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int NumCharToken = 1;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int EscapedCharToken = 2;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int TermStartCharToken = 3;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int TermCharToken = 4;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int WhitespaceToken = 5;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int QuotedCharToken = 6;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int AndToken = 8;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int OrToken = 9;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int NotToken = 10;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int PlusToken = 11;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int MinusToken = 12;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int BareOperToken = 13;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int LParanToken = 14;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RParenToken = 15;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int ColonToken = 16;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int StarToken = 17;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int CaratToken = 18;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int QuotedToken = 19;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int TermToken = 20;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int FuzzySlopToken = 21;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int PrefixTermToken = 22;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int WildTermToken = 23;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RegExpTermToken = 24;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RangeInStartToken = 25;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RangeExStartToken = 26;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int NumberToken = 27;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RangeToToken = 28;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RangeInEndToken = 29;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RangeExEndToken = 30;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RangeQuotedToken = 31;
-        ///// <summary>RegularExpression Id. </summary>
-        //public const int RangeGoopToken = 32;
-
-        ///// <summary>Lexical state. </summary>
-        //public const int BoostToken = 0;
-        ///// <summary>Lexical state. </summary>
-        //public const int RangeToken = 1;
-        ///// <summary>Lexical state. </summary>
-        //public const int DefaultToken = 2;
-
-		/// <summary>Literal token values. </summary>
-		public static string[] TokenImage = new string[] {
-            "<EOF>", 
-            "<_NUM_CHAR>", 
-            "<_ESCAPED_CHAR>", 
-            "<_TERM_START_CHAR>", 
-            "<_TERM_CHAR>", 
-            "<_WHITESPACE>", 
-            "<_QUOTED_CHAR>", 
-            "<token of kind 7>", 
-            "<AND>", 
-            "<OR>", 
-            "<NOT>", 
-            "\"+\"", 
-            "\"-\"", 
-            "<BAREOPER>",
-            "\"(\"", 
-            "\")\"", 
-            "\":\"", 
-            "\"*\"", 
-            "\"^\"", 
-            "<QUOTED>", 
-            "<TERM>", 
-            "<FUZZY_SLOP>", 
-            "<PREFIXTERM>", 
-            "<WILDTERM>", 
-            "<REGEXPTERM>",
-            "\"[\"", 
-            "\"{\"", 
-            "<NUMBER>", 
-            "\"TO\"", 
-            "\"]\"", 
-            "<RANGEIN_QUOTED>", 
-            "<RANGEIN_GOOP>", 
-            "\"TO\"", 
-            "\"}\"", 
-            "<RANGE_QUOTED>",
-            "<RANGE_GOOP>"
-        };
-	}
-}
\ No newline at end of file


[48/50] [abbrv] lucenenet git commit: Cleaned up QueryParser.Surround.Parser.QueryParser comments

Posted by sy...@apache.org.
Cleaned up QueryParser.Surround.Parser.QueryParser comments


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/927b5a2a
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/927b5a2a
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/927b5a2a

Branch: refs/heads/master
Commit: 927b5a2aee55422b98806c6ef0191cc28654655e
Parents: bf63501
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sat Sep 3 04:02:08 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Sat Sep 3 04:02:08 2016 +0700

----------------------------------------------------------------------
 src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/927b5a2a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
index 170eb74..cc18216 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -1,6 +1,7 @@
 \ufeffusing Lucene.Net.QueryParser.Surround.Query;
 using System;
 using System.Collections.Generic;
+using System.Globalization;
 using System.IO;
 
 namespace Lucene.Net.QueryParser.Surround.Parser
@@ -114,7 +115,7 @@ namespace Lucene.Net.QueryParser.Surround.Parser
             /* W, 2W, 3W etc -> 1, 2 3, etc. Same for N, 2N ... */
             return distanceOp.Length == 1
               ? 1
-              : int.Parse(distanceOp.Substring(0, distanceOp.Length - 1));
+              : int.Parse(distanceOp.Substring(0, distanceOp.Length - 1)); // LUCENENET TODO: Culture from current thread?
         }
 
         protected static void CheckDistanceSubQueries(DistanceQuery distq, string opName)
@@ -499,7 +500,6 @@ namespace Lucene.Net.QueryParser.Surround.Parser
                     break;
                 case RegexpToken.QUOTED:
                     term = Jj_consume_token(RegexpToken.QUOTED);
-                    // TODO: Substring fix
                     { if (true) return GetTermQuery(term.image.Substring(1, (term.image.Length - 1) - 1), true /* quoted */); }
                     break;
                 case RegexpToken.SUFFIXTERM:
@@ -509,7 +509,6 @@ namespace Lucene.Net.QueryParser.Surround.Parser
                     {
                         { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                     }
-                    // TODO: Substring fix
                     { if (true) return GetPrefixQuery(term.image.Substring(0, term.image.Length - 1), false /* not quoted */); }
                     break;
                 case RegexpToken.TRUNCTERM:
@@ -528,7 +527,6 @@ namespace Lucene.Net.QueryParser.Surround.Parser
                     {
                         { if (true) throw new ParseException(truncationErrorMessage + term.image); }
                     }
-                    // TODO: Substring fix
                     { if (true) return GetPrefixQuery(term.image.Substring(1, (term.image.Length - 2) - 1), true /* quoted */); }
                     break;
                 default:
@@ -559,7 +557,7 @@ namespace Lucene.Net.QueryParser.Surround.Parser
                 float f;
                 try
                 {
-                    // TODO: Test parsing float in various cultures (.NET)
+                    // LUCENENET TODO: Test parsing float in various cultures (.NET)
                     f = float.Parse(weight.image);
                 }
                 catch (Exception floatExc)


[34/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs b/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
deleted file mode 100644
index aac1505..0000000
--- a/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
+++ /dev/null
@@ -1,1356 +0,0 @@
-using System;
-using System.Diagnostics.CodeAnalysis;
-using System.IO;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-	/// <summary>Token Manager. </summary>
-	public class QueryParserTokenManager //: QueryParserConstants
-	{
-		private void  InitBlock()
-		{
-			StreamWriter temp_writer;
-			temp_writer = new StreamWriter(Console.OpenStandardOutput(), Console.Out.Encoding);
-			temp_writer.AutoFlush = true;
-			debugStream = temp_writer;
-		}
-		
-		/// <summary>Debug output. </summary>
-		public StreamWriter debugStream;
-		/// <summary>Set debug output. </summary>
-		public virtual void  SetDebugStream(StreamWriter ds)
-		{
-			debugStream = ds;
-		}
-		private int JjStopStringLiteralDfa_2(int pos, long active0)
-		{
-			switch (pos)
-			{
-				
-				default: 
-					return - 1;
-				
-			}
-		}
-		private int JjStartNfa_2(int pos, long active0)
-		{
-			return JjMoveNfa_2(JjStopStringLiteralDfa_2(pos, active0), pos + 1);
-		}
-		private int JjStopAtPos(int pos, int kind)
-		{
-			jjmatchedKind = kind;
-			jjmatchedPos = pos;
-			return pos + 1;
-		}
-		private int JjMoveStringLiteralDfa0_2()
-		{
-			switch (curChar)
-			{
-				
-				case (char) (40): 
-					return JjStopAtPos(0, 14);
-				
-				case (char) (41): 
-					return JjStopAtPos(0, 15);
-				
-				case (char) (42): 
-					return JjStartNfaWithStates_2(0, 17, 49);
-				
-				case (char) (43):
-                    return JjStartNfaWithStates_2(0, 11, 15);
-				
-				case (char) (45):
-                    return JjStartNfaWithStates_2(0, 12, 15);
-				
-				case (char) (58): 
-					return JjStopAtPos(0, 16);
-				
-				case (char) (91): 
-					return JjStopAtPos(0, 25);
-				
-				case (char) (94): 
-					return JjStopAtPos(0, 18);
-				
-				case (char) (123): 
-					return JjStopAtPos(0, 26);
-				
-				default:
-                    return JjMoveNfa_2(0, 0);
-				
-			}
-		}
-		private int JjStartNfaWithStates_2(int pos, int kind, int state)
-		{
-			jjmatchedKind = kind;
-			jjmatchedPos = pos;
-			try
-			{
-				curChar = input_stream.ReadChar();
-			}
-			catch (IOException)
-			{
-				return pos + 1;
-			}
-            return JjMoveNfa_2(state, pos + 1);
-		}
-		internal static readonly ulong[] jjbitVec0 = new ulong[]{0x1L, 0x0L, 0x0L, 0x0L};
-		internal static readonly ulong[] jjbitVec1 = new ulong[]{0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL};
-		internal static readonly ulong[] jjbitVec3 = new ulong[]{0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL};
-		internal static readonly ulong[] jjbitVec4 = new ulong[]{0xfffefffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL};
-        private int JjMoveNfa_2(int startState, int curPos)
-		{
-			int startsAt = 0;
-            jjnewStateCnt = 49;
-			int i = 1;
-			jjstateSet[0] = startState;
-			int kind = 0x7fffffff;
-			for (; ; )
-			{
-				if (++jjround == 0x7fffffff)
-					ReInitRounds();
-				if (curChar < 64)
-				{
-					ulong l = (ulong) (1L << (int) curChar);
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							case 49: 
-							case 33:
-                                if ((0xfbff7cf8ffffd9ffL & l) == (ulong)0L)
-									break;
-								if (kind > 23)
-									kind = 23;
-								JjCheckNAddTwoStates(33, 34);
-								break;
-							
-							case 0:
-                                if ((0xfbff54f8ffffd9ffL & l) != (ulong)0L)
-								{
-									if (kind > 23)
-										kind = 23;
-									JjCheckNAddTwoStates(33, 34);
-								}
-								else if ((0x100002600L & l) != 0L)
-								{
-									if (kind > 7)
-										kind = 7;
-								}
-                                else if ((0x280200000000L & l) != 0L)
-                                    jjstateSet[jjnewStateCnt++] = 15;
-								else if (curChar == 47)
-									JjCheckNAddStates(0, 2);
-								else if (curChar == 34)
-                                    JjCheckNAddStates(3, 5);
-                                if ((0x7bff50f8ffffd9ffL & l) != 0L)
-								{
-									if (kind > 20)
-										kind = 20;
-                                    JjCheckNAddStates(6, 10);
-								}
-                                else if (curChar == 42)
-                                {
-                                    if (kind > 22)
-                                        kind = 22;
-                                }
-                                else if (curChar == 33)
-                                {
-                                    if (kind > 10)
-                                        kind = 10;
-                                }
-								if (curChar == 38)
-									jjstateSet[jjnewStateCnt++] = 4;
-								break;
-							
-							case 4: 
-								if (curChar == 38 && kind > 8)
-									kind = 8;
-								break;
-							
-							case 5: 
-								if (curChar == 38)
-									jjstateSet[jjnewStateCnt++] = 4;
-								break;
-							
-							case 13: 
-								if (curChar == 33 && kind > 10)
-									kind = 10;
-								break;
-							
-							case 14:
-                                if ((0x280200000000L & l) != 0L)
-                                    jjstateSet[jjnewStateCnt++] = 15;
-                                break;
-                            case 15:
-                                if ((0x100002600L & l) != 0L && kind > 13)
-                                    kind = 13;
-                                break;
-                            case 16:
-								if (curChar == 34)
-									JjCheckNAddStates(3, 5);
-								break;
-							case 17: 
-								if ((0xfffffffbffffffffL & l) != (ulong) 0L)
-									JjCheckNAddStates(3, 5);
-								break;
-							
-							case 19: 
-								JjCheckNAddStates(3, 5);
-								break;
-							
-							case 20: 
-								if (curChar == 34 && kind > 19)
-									kind = 19;
-								break;
-							
-							case 22: 
-								if ((0x3ff000000000000L & l) == 0L)
-									break;
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddStates(11, 14);
-								break;
-							
-							case 23: 
-								if (curChar == 46)
-									JjCheckNAdd(24);
-								break;
-							
-							case 24: 
-								if ((0x3ff000000000000L & l) == 0L)
-									break;
-								if (kind > 21)
-									kind = 21;
-                                JjCheckNAddStates(15, 17);
-								break;
-							
-							case 25:
-                                if ((0x7bff78f8ffffd9ffL & l) == (ulong)0L)
-									break;
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(25, 26);
-								break;
-							
-							case 27: 
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(25, 26);
-								break;
-							
-							case 28:
-                                if ((0x7bff78f8ffffd9ffL & l) == 0L)
-									break;
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(28, 29);
-								break;
-							
-							case 30: 
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(28, 29);
-								break;
-							
-							case 31:
-                                if (curChar == 42 && kind > 22)
-                                    kind = 22;
-								break;
-							
-							case 32:
-                                if ((0xfbff54f8ffffd9ffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 23)
-                                    kind = 23;
-                                JjCheckNAddTwoStates(33, 34);
-                                break;
-                            case 35:
-                                if (kind > 23)
-                                    kind = 23;
-                                JjCheckNAddTwoStates(33, 34);
-                                break;
-                            case 36:
-                            case 38:
-                                if (curChar == 47)
-                                    JjCheckNAddStates(0, 2);
-                                break;
-                            case 37:
-                                if ((0xffff7fffffffffffL & l) != (ulong)0L)
-                                    JjCheckNAddStates(0, 2);
-                                break;
-                            case 40:
-                                if (curChar == 47 && kind > 24)
-                                    kind = 24;
-                                break;
-                            case 41:
-                                if ((0x7bff50f8ffffd9ffL & l) == 0L)
-                                    break;
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddStates(6, 10);
-                                break;
-                            case 42:
-                                if ((0x7bff78f8ffffd9ffL & l) == 0L)
-                                    break;
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddTwoStates(42, 43);
-                                break;
-                            case 44:
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddTwoStates(42, 43);
-                                break;
-                            case 45:
-                                if ((0x7bff78f8ffffd9ffL & l) != 0L)
-                                    JjCheckNAddStates(18, 20);
-                                break;
-                            case 47:
-                                JjCheckNAddStates(18, 20);
-                                break;
-							
-							default:  break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				else if (curChar < 128)
-				{
-                    // NOTE: This didn't change in Java from 3.0.1 to 4.8.0, but it is different in .NET.
-                    // But changing it back made more tests pass, so I am working under the assumption 63
-                    // is the correct value.
-                    //ulong l = (ulong)(1L << (curChar & 077));
-                    ulong l = (ulong) (1L << (curChar & 63)); 
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							case 49: 
-								if ((0x97ffffff87ffffffL & l) != (ulong) 0L)
-								{
-									if (kind > 23)
-										kind = 23;
-									JjCheckNAddTwoStates(33, 34);
-								}
-								else if (curChar == 92)
-									JjCheckNAddTwoStates(35, 35);
-								break;
-							
-							case 0: 
-								if ((0x97ffffff87ffffffL & l) != (ulong) 0L)
-								{
-									if (kind > 20)
-										kind = 20;
-									JjCheckNAddStates(6, 10);
-								}
-								else if (curChar == 92)
-									JjCheckNAddStates(21, 23);
-								else if (curChar == 126)
-								{
-									if (kind > 21)
-										kind = 21;
-                                    JjCheckNAddStates(24, 26);
-								}
-								if ((0x97ffffff87ffffffL & l) != (ulong) 0L)
-								{
-									if (kind > 23)
-										kind = 23;
-									JjCheckNAddTwoStates(33, 34);
-								}
-								if (curChar == 78)
-									jjstateSet[jjnewStateCnt++] = 11;
-								else if (curChar == 124)
-									jjstateSet[jjnewStateCnt++] = 8;
-								else if (curChar == 79)
-									jjstateSet[jjnewStateCnt++] = 6;
-								else if (curChar == 65)
-									jjstateSet[jjnewStateCnt++] = 2;
-								break;
-							
-							case 1: 
-								if (curChar == 68 && kind > 8)
-									kind = 8;
-								break;
-							
-							case 2: 
-								if (curChar == 78)
-									jjstateSet[jjnewStateCnt++] = 1;
-								break;
-							
-							case 3: 
-								if (curChar == 65)
-									jjstateSet[jjnewStateCnt++] = 2;
-								break;
-							
-							case 6: 
-								if (curChar == 82 && kind > 9)
-									kind = 9;
-								break;
-							
-							case 7: 
-								if (curChar == 79)
-									jjstateSet[jjnewStateCnt++] = 6;
-								break;
-							
-							case 8: 
-								if (curChar == 124 && kind > 9)
-									kind = 9;
-								break;
-							
-							case 9: 
-								if (curChar == 124)
-									jjstateSet[jjnewStateCnt++] = 8;
-								break;
-							
-							case 10: 
-								if (curChar == 84 && kind > 10)
-									kind = 10;
-								break;
-							
-							case 11: 
-								if (curChar == 79)
-									jjstateSet[jjnewStateCnt++] = 10;
-								break;
-							
-							case 12: 
-								if (curChar == 78)
-									jjstateSet[jjnewStateCnt++] = 11;
-								break;
-							
-							case 17: 
-								if ((0xffffffffefffffffL & l) != (ulong) 0L)
-									JjCheckNAddStates(3, 5);
-								break;
-							
-							case 18: 
-								if (curChar == 92)
-									jjstateSet[jjnewStateCnt++] = 19;
-								break;
-							
-							case 19: 
-								JjCheckNAddStates(3, 5);
-								break;
-							
-							case 21: 
-								if (curChar != 126)
-									break;
-								if (kind > 21)
-									kind = 21;
-                                JjCheckNAddStates(24, 26);
-								break;
-							
-							case 25: 
-								if ((0x97ffffff87ffffffL & l) == (ulong) 0L)
-									break;
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(25, 26);
-								break;
-							
-							case 26: 
-								if (curChar == 92)
-									JjAddStates(27, 28);
-								break;
-							
-							case 27: 
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(25, 26);
-								break;
-							
-							case 28: 
-								if ((0x97ffffff87ffffffL & l) == (ulong) 0L)
-									break;
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(28, 29);
-								break;
-							
-							case 29:
-                                if (curChar == 92)
-                                    JjAddStates(29, 30);
-                                break;
-                            case 30:
-                                if (kind > 21)
-                                    kind = 21;
-                                JjCheckNAddTwoStates(28, 29);
-                                break;
-                            case 32:
-                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 23)
-                                    kind = 23;
-                                JjCheckNAddTwoStates(33, 34);
-                                break;
-                            case 33:
-                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 23)
-                                    kind = 23;
-                                JjCheckNAddTwoStates(33, 34);
-                                break;
-                            case 34:
-                                if (curChar == 92)
-                                    JjCheckNAddTwoStates(35, 35);
-                                break;
-                            case 35:
-                                if (kind > 23)
-                                    kind = 23;
-                                JjCheckNAddTwoStates(33, 34);
-                                break;
-                            case 37:
-                                JjAddStates(0, 2);
-                                break;
-                            case 39:
-                                if (curChar == 92)
-                                    jjstateSet[jjnewStateCnt++] = 38;
-                                break;
-                            case 41:
-                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddStates(6, 10);
-                                break;
-                            case 42:
-                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
-                                    break;
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddTwoStates(42, 43);
-                                break;
-                            case 43:
-                                if (curChar == 92)
-                                    JjCheckNAddTwoStates(44, 44);
-                                break;
-                            case 44:
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddTwoStates(42, 43);
-                                break;
-                            case 45:
-                                if ((0x97ffffff87ffffffL & l) != (ulong)0L)
-                                    JjCheckNAddStates(18, 20);
-                                break;
-                            case 46:
-                                if (curChar == 92)
-                                    JjCheckNAddTwoStates(47, 47);
-                                break;
-                            case 47:
-                                JjCheckNAddStates(18, 20);
-                                break;
-                            case 48:
-                                if (curChar == 92)
-                                    JjCheckNAddStates(21, 23);
-                                break;
-
-                            default: break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				else
-				{
-					int hiByte = (int) (curChar >> 8);
-					int i1 = hiByte >> 6;
-					ulong l1 = (ulong) (1L << (hiByte & 63));
-					int i2 = (curChar & 0xff) >> 6;
-					ulong l2 = (ulong) (1L << (curChar & 63));
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							case 49: 
-							case 33: 
-								if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
-									break;
-								if (kind > 23)
-									kind = 23;
-								JjCheckNAddTwoStates(33, 34);
-								break;
-							
-							case 0: 
-								if (JjCanMove_0(hiByte, i1, i2, l1, l2))
-								{
-									if (kind > 7)
-										kind = 7;
-								}
-								if (JjCanMove_2(hiByte, i1, i2, l1, l2))
-								{
-									if (kind > 23)
-										kind = 23;
-									JjCheckNAddTwoStates(33, 34);
-								}
-								if (JjCanMove_2(hiByte, i1, i2, l1, l2))
-								{
-									if (kind > 20)
-										kind = 20;
-									JjCheckNAddStates(6, 10);
-								}
-								break;
-							
-							case 15: 
-                                if (JjCanMove_0(hiByte, i1, i2, l1, l2) && kind > 13)
-                                    kind = 13;
-                                break;
-							case 17: 
-                            case 19:
-								if (JjCanMove_1(hiByte, i1, i2, l1, l2))
-									JjCheckNAddStates(3, 5);
-								break;
-							
-							case 25: 
-								if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
-									break;
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(25, 26);
-								break;
-							
-							case 27: 
-								if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
-									break;
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(25, 26);
-								break;
-							
-							case 28: 
-								if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
-									break;
-								if (kind > 21)
-									kind = 21;
-								JjCheckNAddTwoStates(28, 29);
-								break;
-                            case 30:
-                                if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
-                                    break;
-                                if (kind > 21)
-                                    kind = 21;
-                                JjCheckNAddTwoStates(28, 29);
-                                break;
-							case 32: 
-								if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
-									break;
-								if (kind > 23)
-									kind = 23;
-								JjCheckNAddTwoStates(33, 34);
-								break;
-							
-							case 35: 
-								if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
-									break;
-								if (kind > 23)
-									kind = 23;
-								JjCheckNAddTwoStates(33, 34);
-								break;
-
-                            case 37:
-                                if (JjCanMove_1(hiByte, i1, i2, l1, l2))
-                                    JjAddStates(0, 2);
-                                break;
-                            case 41:
-                                if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
-                                    break;
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddStates(6, 10);
-                                break;
-                            case 42:
-                                if (!JjCanMove_2(hiByte, i1, i2, l1, l2))
-                                    break;
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddTwoStates(42, 43);
-                                break;
-                            case 44:
-                                if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
-                                    break;
-                                if (kind > 20)
-                                    kind = 20;
-                                JjCheckNAddTwoStates(42, 43);
-                                break;
-                            case 45:
-								if (JjCanMove_2(hiByte, i1, i2, l1, l2))
-									JjCheckNAddStates(18, 20);
-								break;
-							
-							case 47: 
-								if (JjCanMove_1(hiByte, i1, i2, l1, l2))
-									JjCheckNAddStates(18, 20);
-								break;
-							
-							default:  break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				if (kind != 0x7fffffff)
-				{
-					jjmatchedKind = kind;
-					jjmatchedPos = curPos;
-					kind = 0x7fffffff;
-				}
-				++curPos;
-				if ((i = jjnewStateCnt) == (startsAt = 49 - (jjnewStateCnt = startsAt)))
-					return curPos;
-				try
-				{
-					curChar = input_stream.ReadChar();
-				}
-				catch (System.IO.IOException)
-				{
-					return curPos;
-				}
-			}
-		}
-		private int JjMoveStringLiteralDfa0_0()
-		{
-			return JjMoveNfa_0(0, 0);
-		}
-		private int JjMoveNfa_0(int startState, int curPos)
-		{
-			int startsAt = 0;
-			jjnewStateCnt = 3;
-			int i = 1;
-			jjstateSet[0] = startState;
-			int kind = 0x7fffffff;
-			for (; ; )
-			{
-				if (++jjround == 0x7fffffff)
-					ReInitRounds();
-				if (curChar < 64)
-				{
-					ulong l = (ulong) (1L << (int) curChar);
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							case 0: 
-								if ((0x3ff000000000000L & l) == 0L)
-									break;
-								if (kind > 27)
-									kind = 27;
-								JjAddStates(31, 32);
-								break;
-							
-							case 1: 
-								if (curChar == 46)
-									JjCheckNAdd(2);
-								break;
-							
-							case 2: 
-								if ((0x3ff000000000000L & l) == 0L)
-									break;
-								if (kind > 27)
-									kind = 27;
-								JjCheckNAdd(2);
-								break;
-							
-							default:  break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				else if (curChar < 128)
-				{
-					ulong l = (ulong) (1L << (curChar & 63));
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							default:  break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				else
-				{
-					int hiByte = (int) (curChar >> 8);
-					int i1 = hiByte >> 6;
-					long l1 = 1L << (hiByte & 63);
-					int i2 = (curChar & 0xff) >> 6;
-					long l2 = 1L << (curChar & 63);
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							default:  break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				if (kind != 0x7fffffff)
-				{
-					jjmatchedKind = kind;
-					jjmatchedPos = curPos;
-					kind = 0x7fffffff;
-				}
-				++curPos;
-				if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
-					return curPos;
-				try
-				{
-					curChar = input_stream.ReadChar();
-				}
-				catch (System.IO.IOException)
-				{
-					return curPos;
-				}
-			}
-		}
-		private int JjStopStringLiteralDfa_1(int pos, long active0)
-		{
-			switch (pos)
-			{
-				
-				case 0:
-                    if ((active0 & 0x10000000L) != 0L)
-					{
-						jjmatchedKind = 32;
-						return 6;
-					}
-					return - 1;
-				
-				default: 
-					return - 1;
-				
-			}
-		}
-		private int JjStartNfa_1(int pos, long active0)
-		{
-			return JjMoveNfa_1(JjStopStringLiteralDfa_1(pos, active0), pos + 1);
-		}
-		private int JjMoveStringLiteralDfa0_1()
-		{
-			switch (curChar)
-			{
-				
-				case (char)84:
-                    return JjMoveStringLiteralDfa1_1(0x10000000L);
-				
-				case (char)93: 
-					return JjStopAtPos(0, 29);
-
-                case (char)125:
-                    return JjStopAtPos(0, 30);
-
-				default: 
-					return JjMoveNfa_1(0, 0);
-				
-			}
-		}
-		private int JjMoveStringLiteralDfa1_1(long active0)
-		{
-			try
-			{
-				curChar = input_stream.ReadChar();
-			}
-			catch (System.IO.IOException)
-			{
-				JjStopStringLiteralDfa_1(0, active0);
-				return 1;
-			}
-			switch (curChar)
-			{
-				
-				case (char) (79):
-                    if ((active0 & 0x10000000L) != 0L)
-						return JjStartNfaWithStates_1(1, 28, 6);
-					break;
-				
-				default: 
-					break;
-				
-			}
-			return JjStartNfa_1(0, active0);
-		}
-		private int JjStartNfaWithStates_1(int pos, int kind, int state)
-		{
-			jjmatchedKind = kind;
-			jjmatchedPos = pos;
-			try
-			{
-				curChar = input_stream.ReadChar();
-			}
-			catch (System.IO.IOException)
-			{
-				return pos + 1;
-			}
-			return JjMoveNfa_1(state, pos + 1);
-		}
-		private int JjMoveNfa_1(int startState, int curPos)
-		{
-			int startsAt = 0;
-			jjnewStateCnt = 7;
-			int i = 1;
-			jjstateSet[0] = startState;
-			int kind = 0x7fffffff;
-			for (; ; )
-			{
-				if (++jjround == 0x7fffffff)
-					ReInitRounds();
-				if (curChar < 64)
-				{
-					ulong l = (ulong) (1L << (int) curChar);
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							case 0:
-                                if ((0xfffffffeffffffffL & l) != (ulong)0L)
-								{
-									if (kind > 32)
-										kind = 32;
-									JjCheckNAdd(6);
-								}
-								if ((0x100002600L & l) != 0L)
-								{
-									if (kind > 7)
-										kind = 7;
-								}
-								else if (curChar == 34)
-									JjCheckNAddTwoStates(2, 4);
-								break;
-							
-							case 1: 
-								if (curChar == 34)
-									JjCheckNAddTwoStates(2, 4);
-								break;
-							
-							case 2:
-                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
-									JjCheckNAddStates(33, 35);
-								break;
-							
-							case 3: 
-								if (curChar == 34)
-									JjCheckNAddStates(33, 35);
-								break;
-							
-							case 5: 
-								if (curChar == 34 && kind > 31)
-									kind = 31;
-								break;
-							
-							case 6:
-                                if ((0xfffffffeffffffffL & l) == (ulong)0L)
-									break;
-								if (kind > 32)
-									kind = 32;
-								JjCheckNAdd(6);
-								break;
-							
-							default:  break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				else if (curChar < 128)
-				{
-					ulong l = (ulong) (1L << (curChar & 63));
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							case 0: 
-							case 6:
-                                if ((0xdfffffffdfffffffL & l) == (ulong)0L)
-									break;
-								if (kind > 32)
-									kind = 32;
-								JjCheckNAdd(6);
-								break;
-							
-							case 2: 
-								JjAddStates(33, 35);
-								break;
-							
-							case 4: 
-								if (curChar == 92)
-									jjstateSet[jjnewStateCnt++] = 3;
-								break;
-							
-							default:  break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				else
-				{
-					int hiByte = (int) (curChar >> 8);
-					int i1 = hiByte >> 6;
-					ulong l1 = (ulong) (1L << (hiByte & 63));
-					int i2 = (curChar & 0xff) >> 6;
-					ulong l2 = (ulong) (1L << (curChar & 63));
-					do 
-					{
-						switch (jjstateSet[--i])
-						{
-							
-							case 0: 
-								if (JjCanMove_0(hiByte, i1, i2, l1, l2))
-								{
-									if (kind > 7)
-										kind = 7;
-								}
-								if (JjCanMove_1(hiByte, i1, i2, l1, l2))
-								{
-									if (kind > 32)
-										kind = 32;
-									JjCheckNAdd(6);
-								}
-								break;
-							
-							case 2: 
-								if (JjCanMove_1(hiByte, i1, i2, l1, l2))
-									JjAddStates(33, 35);
-								break;
-							
-							case 6: 
-								if (!JjCanMove_1(hiByte, i1, i2, l1, l2))
-									break;
-								if (kind > 32)
-									kind = 32;
-								JjCheckNAdd(6);
-								break;
-							
-							default:  break;
-							
-						}
-					}
-					while (i != startsAt);
-				}
-				if (kind != 0x7fffffff)
-				{
-					jjmatchedKind = kind;
-					jjmatchedPos = curPos;
-					kind = 0x7fffffff;
-				}
-				++curPos;
-				if ((i = jjnewStateCnt) == (startsAt = 7 - (jjnewStateCnt = startsAt)))
-					return curPos;
-				try
-				{
-					curChar = input_stream.ReadChar();
-				}
-				catch (System.IO.IOException)
-				{
-					return curPos;
-				}
-			}
-		}
-        internal static readonly int[] jjnextStates = new int[]{
-           37, 39, 40, 17, 18, 20, 42, 45, 31, 46, 43, 22, 23, 25, 26, 24, 
-           25, 26, 45, 31, 46, 44, 47, 35, 22, 28, 29, 27, 27, 30, 30, 0, 
-           1, 2, 4, 5
-        };
-		private static bool JjCanMove_0(int hiByte, int i1, int i2, ulong l1, ulong l2)
-		{
-			switch (hiByte)
-			{
-				
-				case 48: 
-					return ((jjbitVec0[i2] & l2) != (ulong) 0L);
-				
-				default: 
-					return false;
-				
-			}
-		}
-		private static bool JjCanMove_1(int hiByte, int i1, int i2, ulong l1, ulong l2)
-		{
-			switch (hiByte)
-			{
-				
-				case 0: 
-					return ((jjbitVec3[i2] & l2) != (ulong) 0L);
-				
-				default: 
-					if ((jjbitVec1[i1] & l1) != (ulong) 0L)
-						return true;
-					return false;
-				
-			}
-		}
-		private static bool JjCanMove_2(int hiByte, int i1, int i2, ulong l1, ulong l2)
-		{
-			switch (hiByte)
-			{
-				
-				case 0: 
-					return ((jjbitVec3[i2] & l2) != (ulong) 0L);
-				
-				case 48: 
-					return ((jjbitVec1[i2] & l2) != (ulong) 0L);
-				
-				default: 
-					if ((jjbitVec4[i1] & l1) != (ulong) 0L)
-						return true;
-					return false;
-				
-			}
-		}
-
-        ///// <summary>Token literal values. </summary>
-        //public static readonly string[] jjstrLiteralImages = new string[] { 
-        //    "", null, null, null, null, null, null, null, null, null, null, "\x002B", "\x002D", 
-        //    "\x0028", "\x0029", "\x003A", "\x002A", "\x005E", null, null, null, null, null, "\x005B", "\x007B", 
-        //    null, "\x0054\x004F", "\x005D", null, null, "\x0054\x004F", "\x007D", null, null };
-		
-
-		/// <summary>Token literal values. </summary>
-		public static readonly string[] jjstrLiteralImages = new string[]{
-            "", null, null, null, null, null, null, null, null, null, null, "\x002B", "\x002D", 
-            null, "\x0028", "\x0029", "\x003A", "\x002A", "\x005E", null, null, null, null, null, null, 
-            "\x005B", "\x007B", null, "\x0054\x004F", "\x005D", "\x007D", null, null };
-		
-		/// <summary>Lexer state names. </summary>
-		public static readonly string[] lexStateNames = new string[] {
-            "Boost", 
-            "Range", 
-            "DEFAULT"
-        };
-		
-		/// <summary>Lex State array. </summary>
-		public static readonly int[] jjnewLexState = new int[] {
-            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1, -1, -1, -1, -1, -1, 
-            1, 1, 2, -1, 2, 2, -1, -1
-        };
-        internal static readonly ulong[] jjtoToken = new ulong[] { 0x1ffffff01L };
-        internal static readonly long[] jjtoSkip = new long[] { 0x80L };
-		protected internal ICharStream input_stream;
-		private uint[] jjrounds = new uint[49];
-		private int[] jjstateSet = new int[98];
-		protected internal char curChar;
-		/// <summary>Constructor. </summary>
-		public QueryParserTokenManager(ICharStream stream)
-		{
-			InitBlock();
-			input_stream = stream;
-		}
-		
-		/// <summary>Constructor. </summary>
-		public QueryParserTokenManager(ICharStream stream, int lexState):this(stream)
-		{
-			SwitchTo(lexState);
-		}
-		
-		/// <summary>Reinitialise parser. </summary>
-		public virtual void  ReInit(ICharStream stream)
-		{
-			jjmatchedPos = jjnewStateCnt = 0;
-			curLexState = defaultLexState;
-			input_stream = stream;
-			ReInitRounds();
-		}
-		private void  ReInitRounds()
-		{
-			int i;
-			jjround = 0x80000001;
-			for (i = 49; i-- > 0; )
-				jjrounds[i] = 0x80000000;
-		}
-		
-		/// <summary>Reinitialise parser. </summary>
-		public virtual void  ReInit(ICharStream stream, int lexState)
-		{
-			ReInit(stream);
-			SwitchTo(lexState);
-		}
-		
-		/// <summary>Switch to specified lex state. </summary>
-		public virtual void  SwitchTo(int lexState)
-		{
-			if (lexState >= 3 || lexState < 0)
-				throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
-			else
-				curLexState = lexState;
-		}
-		
-		protected internal virtual Token JjFillToken()
-		{
-			Token t;
-			System.String curTokenImage;
-			int beginLine;
-			int endLine;
-			int beginColumn;
-			int endColumn;
-			System.String im = jjstrLiteralImages[jjmatchedKind];
-			curTokenImage = (im == null)?input_stream.Image:im;
-			beginLine = input_stream.BeginLine;
-			beginColumn = input_stream.BeginColumn;
-			endLine = input_stream.EndLine;
-			endColumn = input_stream.EndColumn;
-			t = Token.NewToken(jjmatchedKind, curTokenImage);
-			
-			t.beginLine = beginLine;
-			t.endLine = endLine;
-			t.beginColumn = beginColumn;
-			t.endColumn = endColumn;
-			
-			return t;
-		}
-		
-		internal int curLexState = 2;
-		internal int defaultLexState = 2;
-		internal int jjnewStateCnt;
-		internal uint jjround;
-		internal int jjmatchedPos;
-		internal int jjmatchedKind;
-		
-		/// <summary>Get the next Token. </summary>
-        [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
-        public virtual Token GetNextToken()
-		{
-			Token matchedToken;
-			int curPos = 0;
-			
-			for (; ; )
-			{
-				try
-				{
-					curChar = input_stream.BeginToken();
-				}
-				catch (IOException)
-				{
-					jjmatchedKind = 0;
-					matchedToken = JjFillToken();
-					return matchedToken;
-				}
-				
-				switch (curLexState)
-				{
-					
-					case 0: 
-						jjmatchedKind = 0x7fffffff;
-						jjmatchedPos = 0;
-						curPos = JjMoveStringLiteralDfa0_0();
-						break;
-					
-					case 1: 
-						jjmatchedKind = 0x7fffffff;
-						jjmatchedPos = 0;
-						curPos = JjMoveStringLiteralDfa0_1();
-						break;
-					
-					case 2: 
-						jjmatchedKind = 0x7fffffff;
-						jjmatchedPos = 0;
-						curPos = JjMoveStringLiteralDfa0_2();
-						break;
-					}
-				if (jjmatchedKind != 0x7fffffff)
-				{
-					if (jjmatchedPos + 1 < curPos)
-						input_stream.Backup(curPos - jjmatchedPos - 1);
-					if ((jjtoToken[jjmatchedKind >> 6] & ((ulong) 1L << (jjmatchedKind & 63))) != (ulong) 0L)
-					{
-						matchedToken = JjFillToken();
-						if (jjnewLexState[jjmatchedKind] != - 1)
-							curLexState = jjnewLexState[jjmatchedKind];
-						return matchedToken;
-					}
-					else
-					{
-						if (jjnewLexState[jjmatchedKind] != - 1)
-							curLexState = jjnewLexState[jjmatchedKind];
-						goto EOFLoop;
-					}
-				}
-				int error_line = input_stream.EndLine;
-				int error_column = input_stream.EndColumn;
-				System.String error_after = null;
-				bool EOFSeen = false;
-				try
-				{
-					input_stream.ReadChar(); input_stream.Backup(1);
-				}
-				catch (IOException)
-				{
-					EOFSeen = true;
-					error_after = curPos <= 1?"":input_stream.Image;
-					if (curChar == '\n' || curChar == '\r')
-					{
-						error_line++;
-						error_column = 0;
-					}
-					else
-						error_column++;
-				}
-				if (!EOFSeen)
-				{
-					input_stream.Backup(1);
-					error_after = curPos <= 1?"":input_stream.Image;
-				}
-				throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
-
-EOFLoop: ;
-			}
-		}
-		
-		private void  JjCheckNAdd(int state)
-		{
-			if (jjrounds[state] != jjround)
-			{
-				jjstateSet[jjnewStateCnt++] = state;
-				jjrounds[state] = jjround;
-			}
-		}
-		private void  JjAddStates(int start, int end)
-		{
-			do 
-			{
-				jjstateSet[jjnewStateCnt++] = jjnextStates[start];
-			}
-			while (start++ != end);
-		}
-		private void  JjCheckNAddTwoStates(int state1, int state2)
-		{
-			JjCheckNAdd(state1);
-			JjCheckNAdd(state2);
-		}
-		
-		private void  JjCheckNAddStates(int start, int end)
-		{
-			do 
-			{
-				JjCheckNAdd(jjnextStates[start]);
-			}
-			while (start++ != end);
-		}
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/Token.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/Token.cs b/Lucene.Net.QueryParser/Classic/Token.cs
deleted file mode 100644
index 389e7b3..0000000
--- a/Lucene.Net.QueryParser/Classic/Token.cs
+++ /dev/null
@@ -1,142 +0,0 @@
-using System;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-	
-	/// <summary> 
-    /// Describes the input token stream.
-    /// </summary>
-    [Serializable]
-	public class Token
-	{
-		
-		/// <summary> 
-        /// An integer that describes the kind of this token.  This numbering
-		/// system is determined by JavaCCParser, and a table of these numbers is
-		/// stored in the file ...Constants.java.
-		/// </summary>
-		public int kind;
-		
-		/// <summary>The line number of the first character of this Token. </summary>
-		public int beginLine;
-		/// <summary>The column number of the first character of this Token. </summary>
-		public int beginColumn;
-		/// <summary>The line number of the last character of this Token. </summary>
-		public int endLine;
-		/// <summary>The column number of the last character of this Token. </summary>
-		public int endColumn;
-		
-		/// <summary>The string image of the token.</summary>
-		public string image;
-		
-		/// <summary> 
-        /// A reference to the next regular (non-special) token from the input
-		/// stream.  If this is the last token from the input stream, or if the
-		/// token manager has not read tokens beyond this one, this field is
-		/// set to null.  This is true only if this token is also a regular
-		/// token.  Otherwise, see below for a description of the contents of
-		/// this field.
-		/// </summary>
-		public Token next;
-		
-		/// <summary> 
-        /// This field is used to access special tokens that occur prior to this
-		/// token, but after the immediately preceding regular (non-special) token.
-		/// If there are no such special tokens, this field is set to null.
-		/// When there are more than one such special token, this field refers
-		/// to the last of these special tokens, which in turn refers to the next
-		/// previous special token through its specialToken field, and so on
-		/// until the first special token (whose specialToken field is null).
-		/// The next fields of special tokens refer to other special tokens that
-		/// immediately follow it (without an intervening regular token).  If there
-		/// is no such token, this field is null.
-		/// </summary>
-		public Token specialToken;
-
-	    /// <summary> 
-        /// An optional attribute value of the Token.
-	    /// Tokens which are not used as syntactic sugar will often contain
-	    /// meaningful values that will be used later on by the compiler or
-	    /// interpreter. This attribute value is often different from the image.
-	    /// Any subclass of Token that actually wants to return a non-null value can
-	    /// override this method as appropriate.
-	    /// </summary>
-	    public virtual object Value
-	    {
-	        get { return null; }
-	    }
-
-	    /// <summary> 
-        /// No-argument constructor
-        /// </summary>
-		public Token()
-		{
-		}
-		
-		/// <summary> 
-        /// Constructs a new token for the specified Image.
-        /// </summary>
-		public Token(int kind)
-            : this(kind, null)
-		{
-		}
-		
-		/// <summary> 
-        /// Constructs a new token for the specified Image and Kind.
-        /// </summary>
-		public Token(int kind, string image)
-		{
-			this.kind = kind;
-			this.image = image;
-		}
-		
-		/// <summary> 
-        /// Returns the image.
-        /// </summary>
-		public override string ToString()
-		{
-			return image;
-		}
-		
-		/// <summary> 
-        /// Returns a new Token object, by default. However, if you want, you
-		/// can create and return subclass objects based on the value of ofKind.
-		/// Simply add the cases to the switch for all those special cases.
-		/// For example, if you have a subclass of Token called IDToken that
-		/// you want to create if ofKind is ID, simply add something like :
-		/// 
-		/// case MyParserConstants.ID : return new IDToken(ofKind, image);
-		/// 
-		/// to the following switch statement. Then you can cast matchedToken
-		/// variable to the appropriate type and use sit in your lexical actions.
-		/// </summary>
-		public static Token NewToken(int ofKind, string image)
-		{
-			switch (ofKind)
-			{
-				default:  return new Token(ofKind, image);
-			}
-		}
-		
-		public static Token NewToken(int ofKind)
-		{
-			return NewToken(ofKind, null);
-		}
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/TokenMgrError.cs b/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
deleted file mode 100644
index 2f69e13..0000000
--- a/Lucene.Net.QueryParser/Classic/TokenMgrError.cs
+++ /dev/null
@@ -1,170 +0,0 @@
-using System;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Classic
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-	/// <summary>Token Manager Error. </summary>
-	[Serializable]
-	public class TokenMgrError : Exception
-	{
-        /*
-		* Ordinals for various reasons why an Error of this type can be thrown.
-		*/
-
-        /// <summary> Lexical error occurred.</summary>
-        internal const int LEXICAL_ERROR = 0;
-
-        /// <summary> An attempt was made to create a second instance of a static token manager.</summary>
-        internal const int STATIC_LEXER_ERROR = 1;
-
-        /// <summary> Tried to change to an invalid lexical state.</summary>
-        internal const int INVALID_LEXICAL_STATE = 2;
-
-        /// <summary> Detected (and bailed out of) an infinite loop in the token manager.</summary>
-        internal const int LOOP_DETECTED = 3;
-
-        /// <summary> Indicates the reason why the exception is thrown. It will have
-        /// one of the above 4 values.
-        /// </summary>
-        internal int errorCode;
-
-        /// <summary> 
-        /// Replaces unprintable characters by their escaped (or unicode escaped)
-        /// equivalents in the given string
-        /// </summary>
-        protected internal static string AddEscapes(string str)
-        {
-            StringBuilder retval = new StringBuilder();
-            char ch;
-            for (int i = 0; i < str.Length; i++)
-            {
-                switch (str[i])
-                {
-
-                    case (char)(0):
-                        continue;
-
-                    case '\b':
-                        retval.Append("\\b");
-                        continue;
-
-                    case '\t':
-                        retval.Append("\\t");
-                        continue;
-
-                    case '\n':
-                        retval.Append("\\n");
-                        continue;
-
-                    case '\f':
-                        retval.Append("\\f");
-                        continue;
-
-                    case '\r':
-                        retval.Append("\\r");
-                        continue;
-
-                    case '\"':
-                        retval.Append("\\\"");
-                        continue;
-
-                    case '\'':
-                        retval.Append("\\\'");
-                        continue;
-
-                    case '\\':
-                        retval.Append("\\\\");
-                        continue;
-
-                    default:
-                        if ((ch = str[i]) < 0x20 || ch > 0x7e)
-                        {
-                            string s = "0000" + Convert.ToString(ch, 16);
-                            retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
-                        }
-                        else
-                        {
-                            retval.Append(ch);
-                        }
-                        continue;
-
-                }
-            }
-            return retval.ToString();
-        }
-
-        /// <summary>
-        /// Returns a detailed message for the Error when it is thrown by the
-        /// token manager to indicate a lexical error.
-        /// </summary>
-        /// <remarks>You can customize the lexical error message by modifying this method.</remarks>
-        /// <param name="EOFSeen">indicates if EOF caused the lexical error</param>
-        /// <param name="lexState">lexical state in which this error occurred</param>
-        /// <param name="errorLine">line number when the error occurred</param>
-        /// <param name="errorColumn">column number when the error occurred</param>
-        /// <param name="errorAfter">prefix that was seen before this error occurred</param>
-        /// <param name="curChar">the offending character</param>
-        /// <returns>Detailed error message</returns>
-        protected internal static string LexicalError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar)
-        {
-            return ("Lexical error at line " +
-                errorLine + ", column " +
-                errorColumn + ".  Encountered: " +
-                (EOFSeen ? "<EOF> " : ("\"" + AddEscapes(Convert.ToString(curChar)) + "\"") + " (" + (int)curChar + "), ") +
-                "after : \"" + AddEscapes(errorAfter) + "\"");
-        }
-
-		/// <summary> 
-        /// You can also modify the body of this method to customize your error messages.
-		/// For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
-		/// of end-users concern, so you can return something like :
-		/// 
-		/// "Internal Error : Please file a bug report .... "
-		/// 
-		/// from this method for such cases in the release version of your parser.
-		/// </summary>
-		public override string Message
-		{
-			get { return base.Message; }
-		}
-		
-		/*
-		* Constructors of various flavors follow.
-		*/
-		
-		/// <summary>No arg constructor. </summary>
-		public TokenMgrError()
-		{
-		}
-		
-		/// <summary>Constructor with message and reason. </summary>
-		public TokenMgrError(string message, int reason)
-            : base(message)
-		{
-			errorCode = reason;
-		}
-		
-		/// <summary>Full Constructor. </summary>
-		public TokenMgrError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar, int reason)
-            : this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason)
-		{
-		}
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs b/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
deleted file mode 100644
index 0ac7c5b..0000000
--- a/Lucene.Net.QueryParser/ComplexPhrase/ComplexPhraseQueryParser.cs
+++ /dev/null
@@ -1,468 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Index;
-using Lucene.Net.QueryParser.Classic;
-using Lucene.Net.Search;
-using Lucene.Net.Search.Spans;
-using Lucene.Net.Util;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Lucene.Net.QueryParser.ComplexPhrase
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// QueryParser which permits complex phrase query syntax eg "(john jon
-    /// jonathan~) peters*".
-    /// <p>
-    /// Performs potentially multiple passes over Query text to parse any nested
-    /// logic in PhraseQueries. - First pass takes any PhraseQuery content between
-    /// quotes and stores for subsequent pass. All other query content is parsed as
-    /// normal - Second pass parses any stored PhraseQuery content, checking all
-    /// embedded clauses are referring to the same field and therefore can be
-    /// rewritten as Span queries. All PhraseQuery clauses are expressed as
-    /// ComplexPhraseQuery objects
-    /// </p>
-    /// <p>
-    /// This could arguably be done in one pass using a new QueryParser but here I am
-    /// working within the constraints of the existing parser as a base class. This
-    /// currently simply feeds all phrase content through an analyzer to select
-    /// phrase terms - any "special" syntax such as * ~ * etc are not given special
-    /// status
-    /// </p>
-    /// </summary>
-    public class ComplexPhraseQueryParser : Classic.QueryParser
-    {
-        private List<ComplexPhraseQuery> complexPhrases = null;
-
-        private bool isPass2ResolvingPhrases;
-
-        /// <summary>
-        /// When <code>inOrder</code> is true, the search terms must
-        /// exists in the documents as the same order as in query.
-        /// Choose between ordered (true) or un-ordered (false) proximity search.
-        /// </summary>
-        public bool InOrder { get; set; }
-
-        private ComplexPhraseQuery currentPhraseQuery = null;
-
-        public ComplexPhraseQueryParser(LuceneVersion matchVersion, string f, Analyzer a)
-            : base(matchVersion, f, a)
-        {
-            // set property defaults
-            this.InOrder = true;
-        }
-
-        protected internal override Query GetFieldQuery(string field, string queryText, int slop)
-        {
-            ComplexPhraseQuery cpq = new ComplexPhraseQuery(field, queryText, slop, InOrder);
-            complexPhrases.Add(cpq); // add to list of phrases to be parsed once
-            // we
-            // are through with this pass
-            return cpq;
-        }
-
-        public override Query Parse(string query)
-        {
-            if (isPass2ResolvingPhrases)
-            {
-                MultiTermQuery.RewriteMethod oldMethod = MultiTermRewriteMethod;
-                try
-                {
-                    // Temporarily force BooleanQuery rewrite so that Parser will
-                    // generate visible
-                    // collection of terms which we can convert into SpanQueries.
-                    // ConstantScoreRewrite mode produces an
-                    // opaque ConstantScoreQuery object which cannot be interrogated for
-                    // terms in the same way a BooleanQuery can.
-                    // QueryParser is not guaranteed threadsafe anyway so this temporary
-                    // state change should not
-                    // present an issue
-                    MultiTermRewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
-                    return base.Parse(query);
-                }
-                finally
-                {
-                    MultiTermRewriteMethod = oldMethod;
-                }
-            }
-
-            // First pass - parse the top-level query recording any PhraseQuerys
-            // which will need to be resolved
-            complexPhrases = new List<ComplexPhraseQuery>();
-            Query q = base.Parse(query);
-
-            // Perform second pass, using this QueryParser to parse any nested
-            // PhraseQueries with different
-            // set of syntax restrictions (i.e. all fields must be same)
-            isPass2ResolvingPhrases = true;
-            try
-            {
-                foreach (var currentPhraseQuery in complexPhrases)
-                {
-                    this.currentPhraseQuery = currentPhraseQuery;
-                    // in each phrase, now parse the contents between quotes as a
-                    // separate parse operation
-                    currentPhraseQuery.ParsePhraseElements(this);
-                }
-            }
-            finally
-            {
-                isPass2ResolvingPhrases = false;
-            }
-            return q;
-        }
-
-        // There is No "getTermQuery throws ParseException" method to override so
-        // unfortunately need
-        // to throw a runtime exception here if a term for another field is embedded
-        // in phrase query
-        protected override Query NewTermQuery(Term term)
-        {
-            if (isPass2ResolvingPhrases)
-            {
-                try
-                {
-                    CheckPhraseClauseIsForSameField(term.Field);
-                }
-                catch (ParseException pe)
-                {
-                    throw new Exception("Error parsing complex phrase", pe);
-                }
-            }
-            return base.NewTermQuery(term);
-        }
-
-        // Helper method used to report on any clauses that appear in query syntax
-        private void CheckPhraseClauseIsForSameField(string field)
-        {
-            if (!field.Equals(currentPhraseQuery.Field))
-            {
-                throw new ParseException("Cannot have clause for field \"" + field
-                    + "\" nested in phrase " + " for field \"" + currentPhraseQuery.Field
-                    + "\"");
-            }
-        }
-
-        protected internal override Query GetWildcardQuery(string field, string termStr)
-        {
-            if (isPass2ResolvingPhrases)
-            {
-                CheckPhraseClauseIsForSameField(field);
-            }
-            return base.GetWildcardQuery(field, termStr);
-        }
-
-        protected internal override Query GetRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
-        {
-            if (isPass2ResolvingPhrases)
-            {
-                CheckPhraseClauseIsForSameField(field);
-            }
-            return base.GetRangeQuery(field, part1, part2, startInclusive, endInclusive);
-        }
-
-        protected internal override Query NewRangeQuery(string field, string part1, string part2, bool startInclusive, bool endInclusive)
-        {
-            if (isPass2ResolvingPhrases)
-            {
-                // Must use old-style RangeQuery in order to produce a BooleanQuery
-                // that can be turned into SpanOr clause
-                TermRangeQuery rangeQuery = TermRangeQuery.NewStringRange(field, part1, part2, startInclusive, endInclusive);
-                rangeQuery.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
-                return rangeQuery;
-            }
-            return base.NewRangeQuery(field, part1, part2, startInclusive, endInclusive);
-        }
-
-        protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
-        {
-            if (isPass2ResolvingPhrases)
-            {
-                CheckPhraseClauseIsForSameField(field);
-            }
-            return base.GetFuzzyQuery(field, termStr, minSimilarity);
-        }
-
-        /// <summary>
-        /// Used to handle the query content in between quotes and produced Span-based
-        /// interpretations of the clauses.
-        /// </summary>
-        public class ComplexPhraseQuery : Query
-        {
-            private readonly string field;
-            private readonly string phrasedQueryStringContents;
-            private readonly int slopFactor;
-            private readonly bool inOrder;
-            private Query contents;
-
-            public ComplexPhraseQuery(string field, string phrasedQueryStringContents,
-                int slopFactor, bool inOrder)
-            {
-                this.field = field;
-                this.phrasedQueryStringContents = phrasedQueryStringContents;
-                this.slopFactor = slopFactor;
-                this.inOrder = inOrder;
-            }
-
-            public string Field
-            {
-                get { return field; }
-            }
-
-            // Called by ComplexPhraseQueryParser for each phrase after the main
-            // parse
-            // thread is through
-            protected internal void ParsePhraseElements(ComplexPhraseQueryParser qp)
-            {
-                // TODO ensure that field-sensitivity is preserved ie the query
-                // string below is parsed as
-                // field+":("+phrasedQueryStringContents+")"
-                // but this will need code in rewrite to unwrap the first layer of
-                // boolean query
-
-                string oldDefaultParserField = qp.Field;
-                try
-                {
-                    //temporarily set the QueryParser to be parsing the default field for this phrase e.g author:"fred* smith"
-                    qp.field = this.field;
-                    contents = qp.Parse(phrasedQueryStringContents);
-                }
-                finally
-                {
-                    qp.field = oldDefaultParserField;
-                }
-            }
-
-            public override Query Rewrite(IndexReader reader)
-            {
-                // ArrayList spanClauses = new ArrayList();
-                if (contents is TermQuery)
-                {
-                    return contents;
-                }
-                // Build a sequence of Span clauses arranged in a SpanNear - child
-                // clauses can be complex
-                // Booleans e.g. nots and ors etc
-                int numNegatives = 0;
-                if (!(contents is BooleanQuery))
-                {
-                    throw new ArgumentException("Unknown query type \""
-                        + contents.GetType().Name
-                        + "\" found in phrase query string \"" + phrasedQueryStringContents
-                        + "\"");
-                }
-                BooleanQuery bq = (BooleanQuery)contents;
-                BooleanClause[] bclauses = bq.Clauses;
-                SpanQuery[] allSpanClauses = new SpanQuery[bclauses.Length];
-                // For all clauses e.g. one* two~
-                for (int i = 0; i < bclauses.Length; i++)
-                {
-                    // HashSet bclauseterms=new HashSet();
-                    Query qc = bclauses[i].Query;
-                    // Rewrite this clause e.g one* becomes (one OR onerous)
-                    qc = qc.Rewrite(reader);
-                    if (bclauses[i].Occur_.Equals(BooleanClause.Occur.MUST_NOT))
-                    {
-                        numNegatives++;
-                    }
-
-                    if (qc is BooleanQuery)
-                    {
-                        List<SpanQuery> sc = new List<SpanQuery>();
-                        AddComplexPhraseClause(sc, (BooleanQuery)qc);
-                        if (sc.Count > 0)
-                        {
-                            allSpanClauses[i] = sc.ElementAt(0);
-                        }
-                        else
-                        {
-                            // Insert fake term e.g. phrase query was for "Fred Smithe*" and
-                            // there were no "Smithe*" terms - need to
-                            // prevent match on just "Fred".
-                            allSpanClauses[i] = new SpanTermQuery(new Term(field,
-                                "Dummy clause because no terms found - must match nothing"));
-                        }
-                    }
-                    else
-                    {
-                        if (qc is TermQuery)
-                        {
-                            TermQuery tq = (TermQuery)qc;
-                            allSpanClauses[i] = new SpanTermQuery(tq.Term);
-                        }
-                        else
-                        {
-                            throw new ArgumentException("Unknown query type \""
-                                + qc.GetType().Name
-                                + "\" found in phrase query string \""
-                                + phrasedQueryStringContents + "\"");
-                        }
-
-                    }
-                }
-                if (numNegatives == 0)
-                {
-                    // The simple case - no negative elements in phrase
-                    return new SpanNearQuery(allSpanClauses, slopFactor, inOrder);
-                }
-                // Complex case - we have mixed positives and negatives in the
-                // sequence.
-                // Need to return a SpanNotQuery
-                List<SpanQuery> positiveClauses = new List<SpanQuery>();
-                for (int j = 0; j < allSpanClauses.Length; j++)
-                {
-                    if (!bclauses[j].Occur_.Equals(BooleanClause.Occur.MUST_NOT))
-                    {
-                        positiveClauses.Add(allSpanClauses[j]);
-                    }
-                }
-
-                SpanQuery[] includeClauses = positiveClauses
-                    .ToArray();
-
-                SpanQuery include = null;
-                if (includeClauses.Length == 1)
-                {
-                    include = includeClauses[0]; // only one positive clause
-                }
-                else
-                {
-                    // need to increase slop factor based on gaps introduced by
-                    // negatives
-                    include = new SpanNearQuery(includeClauses, slopFactor + numNegatives,
-                        inOrder);
-                }
-                // Use sequence of positive and negative values as the exclude.
-                SpanNearQuery exclude = new SpanNearQuery(allSpanClauses, slopFactor,
-                    inOrder);
-                SpanNotQuery snot = new SpanNotQuery(include, exclude);
-                return snot;
-            }
-
-            private void AddComplexPhraseClause(List<SpanQuery> spanClauses, BooleanQuery qc)
-            {
-                List<SpanQuery> ors = new List<SpanQuery>();
-                List<SpanQuery> nots = new List<SpanQuery>();
-                BooleanClause[] bclauses = qc.Clauses;
-
-                // For all clauses e.g. one* two~
-                for (int i = 0; i < bclauses.Length; i++)
-                {
-                    Query childQuery = bclauses[i].Query;
-
-                    // select the list to which we will add these options
-                    List<SpanQuery> chosenList = ors;
-                    if (bclauses[i].Occur_ == BooleanClause.Occur.MUST_NOT)
-                    {
-                        chosenList = nots;
-                    }
-
-                    if (childQuery is TermQuery)
-                    {
-                        TermQuery tq = (TermQuery)childQuery;
-                        SpanTermQuery stq = new SpanTermQuery(tq.Term);
-                        stq.Boost = tq.Boost;
-                        chosenList.Add(stq);
-                    }
-                    else if (childQuery is BooleanQuery)
-                    {
-                        BooleanQuery cbq = (BooleanQuery)childQuery;
-                        AddComplexPhraseClause(chosenList, cbq);
-                    }
-                    else
-                    {
-                        // LUCENETODO alternatively could call extract terms here?
-                        throw new ArgumentException("Unknown query type:"
-                            + childQuery.GetType().Name);
-                    }
-                }
-                if (ors.Count == 0)
-                {
-                    return;
-                }
-                SpanOrQuery soq = new SpanOrQuery(ors
-                    .ToArray());
-                if (nots.Count == 0)
-                {
-                    spanClauses.Add(soq);
-                }
-                else
-                {
-                    SpanOrQuery snqs = new SpanOrQuery(nots
-                        .ToArray());
-                    SpanNotQuery snq = new SpanNotQuery(soq, snqs);
-                    spanClauses.Add(snq);
-                }
-            }
-
-            public override string ToString(string field)
-            {
-                return "\"" + phrasedQueryStringContents + "\"";
-            }
-
-            public override int GetHashCode()
-            {
-                int prime = 31;
-                int result = base.GetHashCode();
-                result = prime * result + ((field == null) ? 0 : field.GetHashCode());
-                result = prime
-                    * result
-                    + ((phrasedQueryStringContents == null) ? 0
-                        : phrasedQueryStringContents.GetHashCode());
-                result = prime * result + slopFactor;
-                result = prime * result + (inOrder ? 1 : 0);
-                return result;
-            }
-
-            public override bool Equals(object obj)
-            {
-                if (this == obj)
-                    return true;
-                if (obj == null)
-                    return false;
-                if (GetType() != obj.GetType())
-                    return false;
-                if (!base.Equals(obj))
-                {
-                    return false;
-                }
-                ComplexPhraseQuery other = (ComplexPhraseQuery)obj;
-                if (field == null)
-                {
-                    if (other.field != null)
-                        return false;
-                }
-                else if (!field.Equals(other.field))
-                    return false;
-                if (phrasedQueryStringContents == null)
-                {
-                    if (other.phrasedQueryStringContents != null)
-                        return false;
-                }
-                else if (!phrasedQueryStringContents
-                  .Equals(other.phrasedQueryStringContents))
-                    return false;
-                if (slopFactor != other.slopFactor)
-                    return false;
-                return inOrder == other.inOrder;
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs b/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
deleted file mode 100644
index 6418f87..0000000
--- a/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
+++ /dev/null
@@ -1,131 +0,0 @@
-\ufeffusing Lucene.Net.Analysis;
-using Lucene.Net.Search;
-using Lucene.Net.Util;
-using System;
-
-namespace Lucene.Net.QueryParser.Ext
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// The <see cref="ExtendableQueryParser"/> enables arbitrary query parser extension
-    /// based on a customizable field naming scheme. The lucene query syntax allows
-    /// implicit and explicit field definitions as query prefix followed by a colon
-    /// (':') character. The <see cref="ExtendableQueryParser"/> allows to encode extension
-    /// keys into the field symbol associated with a registered instance of
-    /// <see cref="ParserExtension"/>. A customizable separation character separates the
-    /// extension key from the actual field symbol. The <see cref="ExtendableQueryParser"/>
-    /// splits (<see cref="Extensions.SplitExtensionField(String, String)"/>) the
-    /// extension key from the field symbol and tries to resolve the associated
-    /// <see cref="ParserExtension"/>. If the parser can't resolve the key or the field
-    /// token does not contain a separation character, <see cref="ExtendableQueryParser"/>
-    /// yields the same behavior as its super class <see cref="QueryParser"/>. Otherwise,
-    /// if the key is associated with a <see cref="ParserExtension"/> instance, the parser
-    /// builds an instance of <see cref="ExtensionQuery"/> to be processed by
-    /// <see cref="ParserExtension.Parse(ExtensionQuery)"/>.If a extension field does not
-    /// contain a field part the default field for the query will be used.
-    /// <p>
-    /// To guarantee that an extension field is processed with its associated
-    /// extension, the extension query part must escape any special characters like
-    /// '*' or '['. If the extension query contains any whitespace characters, the
-    /// extension query part must be enclosed in quotes.
-    /// Example ('_' used as separation character):
-    /// <pre>
-    ///   title_customExt:"Apache Lucene\?" OR content_customExt:prefix\*
-    /// </pre>
-    /// 
-    /// Search on the default field:
-    /// <pre>
-    ///   _customExt:"Apache Lucene\?" OR _customExt:prefix\*
-    /// </pre>
-    /// </p>
-    /// <p>
-    /// The <see cref="ExtendableQueryParser"/> itself does not implement the logic how
-    /// field and extension key are separated or ordered. All logic regarding the
-    /// extension key and field symbol parsing is located in <see cref="Extensions"/>.
-    /// Customized extension schemes should be implemented by sub-classing
-    /// <see cref="Extensions"/>.
-    /// </p>
-    /// <p>
-    /// For details about the default encoding scheme see <see cref="Extensions"/>.
-    /// </p>
-    /// 
-    /// <see cref="Extensions"/>
-    /// <see cref="ParserExtension"/>
-    /// <see cref="ExtensionQuery"/>
-    /// </summary>
-    public class ExtendableQueryParser : Classic.QueryParser
-    {
-        private readonly string defaultField;
-        private readonly Extensions extensions;
-
-  
-        /// <summary>
-        ///  Default empty extensions instance
-        /// </summary>
-        private static readonly Extensions DEFAULT_EXTENSION = new Extensions();
-
-        /// <summary>
-        /// Creates a new <see cref="ExtendableQueryParser"/> instance
-        /// </summary>
-        /// <param name="matchVersion">the lucene version to use.</param>
-        /// <param name="f">the default query field</param>
-        /// <param name="a">the analyzer used to find terms in a query string</param>
-        public ExtendableQueryParser(LuceneVersion matchVersion, string f, Analyzer a)
-            : base(matchVersion, f, a)
-        {
-        }
-
-        /// <summary>
-        /// Creates a new <see cref="ExtendableQueryParser"/> instance
-        /// </summary>
-        /// <param name="matchVersion">the lucene version to use.</param>
-        /// <param name="f">the default query field</param>
-        /// <param name="a">the analyzer used to find terms in a query string</param>
-        /// <param name="ext">the query parser extensions</param>
-        public ExtendableQueryParser(LuceneVersion matchVersion, string f, Analyzer a, Extensions ext)
-            : base(matchVersion, f, a)
-        {
-            this.defaultField = f;
-            this.extensions = ext;
-        }
-
-        /// <summary>
-        /// Returns the extension field delimiter character.
-        /// </summary>
-        /// <returns>the extension field delimiter character.</returns>
-        public char ExtensionFieldDelimiter
-        {
-            get { return extensions.ExtensionFieldDelimiter; }
-        }
-
-        protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
-        {
-            Tuple<string, string> splitExtensionField = this.extensions
-                .SplitExtensionField(defaultField, field);
-            ParserExtension extension = this.extensions
-                .GetExtension(splitExtensionField.Item2);
-            if (extension != null)
-            {
-                return extension.Parse(new ExtensionQuery(this, splitExtensionField.Item1,
-                    queryText));
-            }
-            return base.GetFieldQuery(field, queryText, quoted);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs b/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
deleted file mode 100644
index 610e4ad..0000000
--- a/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
+++ /dev/null
@@ -1,54 +0,0 @@
-\ufeffnamespace Lucene.Net.QueryParser.Ext
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    /// <summary>
-    /// <see cref="ExtensionQuery"/> holds all query components extracted from the original
-    /// query string like the query field and the extension query string.
-    /// </summary>
-    public class ExtensionQuery
-    {
-        /// <summary>
-        /// Creates a new <see cref="ExtensionQuery"/>
-        /// </summary>
-        /// <param name="topLevelParser"></param>
-        /// <param name="field">the query field</param>
-        /// <param name="rawQueryString">the raw extension query string</param>
-        public ExtensionQuery(Classic.QueryParser topLevelParser, string field, string rawQueryString)
-        {
-            this.Field = field;
-            this.RawQueryString = rawQueryString;
-            this.TopLevelParser = topLevelParser;
-        }
-
-        /// <summary>
-        /// Returns the query field
-        /// </summary>
-        public string Field { get; protected set; }
-
-        /// <summary>
-        /// Returns the raw extension query string
-        /// </summary>
-        public string RawQueryString { get; protected set; }
-
-        /// <summary>
-        /// Returns the top level parser which created this <see cref="ExtensionQuery"/>
-        /// </summary>
-        public Classic.QueryParser TopLevelParser { get; protected set; }
-    }
-}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/Lucene.Net.QueryParser/Ext/Extensions.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Ext/Extensions.cs b/Lucene.Net.QueryParser/Ext/Extensions.cs
deleted file mode 100644
index 6895268..0000000
--- a/Lucene.Net.QueryParser/Ext/Extensions.cs
+++ /dev/null
@@ -1,167 +0,0 @@
-\ufeffusing Lucene.Net.QueryParser.Classic;
-using System;
-using System.Collections.Generic;
-using System.Text;
-
-namespace Lucene.Net.QueryParser.Ext
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    public class Extensions
-    {
-        private readonly IDictionary<string, ParserExtension> extensions = new Dictionary<string, ParserExtension>();
-        private readonly char extensionFieldDelimiter;
-
-        /// <summary>
-        /// The default extension field delimiter character. This constant is set to ':'
-        /// </summary>
-        public static readonly char DEFAULT_EXTENSION_FIELD_DELIMITER = ':';
-
-        /// <summary>
-        /// Creates a new <see cref="Extensions"/> instance with the
-        /// <see cref="#DEFAULT_EXTENSION_FIELD_DELIMITER"/> as a delimiter character.
-        /// </summary>
-        public Extensions()
-            : this(DEFAULT_EXTENSION_FIELD_DELIMITER)
-        {
-        }
-
-        /// <summary>
-        /// Creates a new <see cref="Extensions"/> instance
-        /// </summary>
-        /// <param name="extensionFieldDelimiter">the extensions field delimiter character</param>
-        public Extensions(char extensionFieldDelimiter)
-        {
-            this.extensionFieldDelimiter = extensionFieldDelimiter;
-        }
-
-        /// <summary>
-        /// Adds a new <see cref="ParserExtension"/> instance associated with the given key.
-        /// </summary>
-        /// <param name="key">the parser extension key</param>
-        /// <param name="extension">the parser extension</param>
-        public virtual void Add(string key, ParserExtension extension)
-        {
-            this.extensions[key] = extension;
-        }
-
-        /// <summary>
-        /// Returns the <see cref="ParserExtension"/> instance for the given key or
-        /// <code>null</code> if no extension can be found for the key.
-        /// </summary>
-        /// <param name="key">the extension key</param>
-        /// <returns>the <see cref="ParserExtension"/> instance for the given key or
-        /// <code>null</code> if no extension can be found for the key.</returns>
-        public ParserExtension GetExtension(string key)
-        {
-            if (key == null || !this.extensions.ContainsKey(key)) return null;
-            return this.extensions[key];
-        }
-
-        /// <summary>
-        /// Returns the extension field delimiter
-        /// </summary>
-        public virtual char ExtensionFieldDelimiter
-        {
-            get { return extensionFieldDelimiter; }
-        }
-
-        /// <summary>
-        /// Splits a extension field and returns the field / extension part as a
-        /// <see cref="Tuple{String,String}"/>. This method tries to split on the first occurrence of the
-        /// extension field delimiter, if the delimiter is not present in the string
-        /// the result will contain a <code>null</code> value for the extension key and
-        /// the given field string as the field value. If the given extension field
-        /// string contains no field identifier the result pair will carry the given
-        /// default field as the field value.
-        /// </summary>
-        /// <param name="defaultField">the default query field</param>
-        /// <param name="field">the extension field string</param>
-        /// <returns>a {<see cref="Tuple{String,String}"/> with the field name as the <see cref="Tuple{String,String}.Item1"/> and the
-        /// extension key as the <see cref="Tuple{String,String}.Item2"/></returns>
-        public Tuple<string, string> SplitExtensionField(string defaultField, string field)
-        {
-            int indexOf = field.IndexOf(this.extensionFieldDelimiter);
-            if (indexOf < 0)
-                return new Tuple<string, string>(field, null);
-            string indexField = indexOf == 0 ? defaultField : field.Substring(0, indexOf);
-            string extensionKey = field.Substring(indexOf + 1);
-            return new Tuple<string, string>(indexField, extensionKey);
-        }
-
-        /// <summary>
-        /// Escapes an extension field. The default implementation is equivalent to
-        /// <see cref="QueryParser.Escape(String)"/>.
-        /// </summary>
-        /// <param name="extfield">the extension field identifier</param>
-        /// <returns>the extension field identifier with all special chars escaped with
-        /// a backslash character.</returns>
-        public string EscapeExtensionField(string extfield)
-        {
-            return QueryParserBase.Escape(extfield);
-        }
-
-        /// <summary>
-        /// Builds an extension field string from a given extension key and the default
-        /// query field. The default field and the key are delimited with the extension
-        /// field delimiter character. This method makes no assumption about the order
-        /// of the extension key and the field. By default the extension key is
-        /// appended to the end of the returned string while the field is added to the
-        /// beginning. Special Query characters are escaped in the result.
-        /// <p>
-        /// Note: <see cref="Extensions"/> subclasses must maintain the contract between
-        /// <see cref="M:BuildExtensionField(String)"/> and
-        /// <see cref="M:BuildExtensionField(String, String)"/> where the latter inverts the
-        /// former.
-        /// </p>
-        /// </summary>
-        /// <param name="extensionKey">the extension key</param>
-        /// <returns>escaped extension field identifier</returns>
-        public string BuildExtensionField(string extensionKey)
-        {
-            return BuildExtensionField(extensionKey, "");
-        }
-
-        /// <summary>
-        /// Builds an extension field string from a given extension key and the default
-        /// query field. The default field and the key are delimited with the extension
-        /// field delimiter character. This method makes no assumption about the order
-        /// of the extension key and the field. By default the extension key is
-        /// appended to the end of the returned string while the field is added to the
-        /// beginning. Special Query characters are escaped in the result.
-        /// <p>
-        /// Note: <see cref="Extensions"/> subclasses must maintain the contract between
-        /// <see cref="M:BuildExtensionField(String)"/> and
-        /// <see cref="M:BuildExtensionField(String, String)"/> where the latter inverts the
-        /// former.
-        /// </summary>
-        /// <param name="extensionKey">the extension key</param>
-        /// <param name="field">the field to apply the extension on.</param>
-        /// <returns>escaped extension field identifier</returns>
-        /// <remarks>See <see cref="M:BuildExtensionField(String)"/> to use the default query field</remarks>
-        public string BuildExtensionField(string extensionKey, string field)
-        {
-            StringBuilder builder = new StringBuilder(field);
-            builder.Append(this.extensionFieldDelimiter);
-            builder.Append(extensionKey);
-            return EscapeExtensionField(builder.ToString());
-        }
-
-        // NOTE: Pair<T, T> was eliminated in favor of the built in Tuple<T, T> type.
-    }
-}


[13/50] [abbrv] lucenenet git commit: Ported QueryParser.Ext namespace + tests.

Posted by sy...@apache.org.
Ported QueryParser.Ext namespace + tests.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/e45f3289
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/e45f3289
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/e45f3289

Branch: refs/heads/master
Commit: e45f3289d5710bbcf3f9a4cbe995f31d05af9530
Parents: 071b60c
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Aug 1 05:54:43 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:29 2016 +0700

----------------------------------------------------------------------
 .../Ext/ExtendableQueryParser.cs                | 131 +++++++++++++++
 Lucene.Net.QueryParser/Ext/ExtensionQuery.cs    |  54 ++++++
 Lucene.Net.QueryParser/Ext/Extensions.cs        | 167 +++++++++++++++++++
 Lucene.Net.QueryParser/Ext/ParserExtension.cs   |  50 ++++++
 .../Lucene.Net.QueryParser.csproj               |   4 +
 .../Ext/ExtensionStub.cs                        |  30 ++++
 .../Ext/TestExtendableQueryParser.cs            | 145 ++++++++++++++++
 .../Ext/TestExtensions.cs                       |  97 +++++++++++
 .../Lucene.Net.Tests.QueryParser.csproj         |   3 +
 .../Util/QueryParserTestBase.cs                 |   2 +-
 10 files changed, 682 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs b/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
new file mode 100644
index 0000000..6418f87
--- /dev/null
+++ b/Lucene.Net.QueryParser/Ext/ExtendableQueryParser.cs
@@ -0,0 +1,131 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using System;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// The <see cref="ExtendableQueryParser"/> enables arbitrary query parser extension
+    /// based on a customizable field naming scheme. The lucene query syntax allows
+    /// implicit and explicit field definitions as query prefix followed by a colon
+    /// (':') character. The <see cref="ExtendableQueryParser"/> allows to encode extension
+    /// keys into the field symbol associated with a registered instance of
+    /// <see cref="ParserExtension"/>. A customizable separation character separates the
+    /// extension key from the actual field symbol. The <see cref="ExtendableQueryParser"/>
+    /// splits (<see cref="Extensions.SplitExtensionField(String, String)"/>) the
+    /// extension key from the field symbol and tries to resolve the associated
+    /// <see cref="ParserExtension"/>. If the parser can't resolve the key or the field
+    /// token does not contain a separation character, <see cref="ExtendableQueryParser"/>
+    /// yields the same behavior as its super class <see cref="QueryParser"/>. Otherwise,
+    /// if the key is associated with a <see cref="ParserExtension"/> instance, the parser
+    /// builds an instance of <see cref="ExtensionQuery"/> to be processed by
+    /// <see cref="ParserExtension.Parse(ExtensionQuery)"/>.If a extension field does not
+    /// contain a field part the default field for the query will be used.
+    /// <p>
+    /// To guarantee that an extension field is processed with its associated
+    /// extension, the extension query part must escape any special characters like
+    /// '*' or '['. If the extension query contains any whitespace characters, the
+    /// extension query part must be enclosed in quotes.
+    /// Example ('_' used as separation character):
+    /// <pre>
+    ///   title_customExt:"Apache Lucene\?" OR content_customExt:prefix\*
+    /// </pre>
+    /// 
+    /// Search on the default field:
+    /// <pre>
+    ///   _customExt:"Apache Lucene\?" OR _customExt:prefix\*
+    /// </pre>
+    /// </p>
+    /// <p>
+    /// The <see cref="ExtendableQueryParser"/> itself does not implement the logic how
+    /// field and extension key are separated or ordered. All logic regarding the
+    /// extension key and field symbol parsing is located in <see cref="Extensions"/>.
+    /// Customized extension schemes should be implemented by sub-classing
+    /// <see cref="Extensions"/>.
+    /// </p>
+    /// <p>
+    /// For details about the default encoding scheme see <see cref="Extensions"/>.
+    /// </p>
+    /// 
+    /// <see cref="Extensions"/>
+    /// <see cref="ParserExtension"/>
+    /// <see cref="ExtensionQuery"/>
+    /// </summary>
+    public class ExtendableQueryParser : Classic.QueryParser
+    {
+        private readonly string defaultField;
+        private readonly Extensions extensions;
+
+  
+        /// <summary>
+        ///  Default empty extensions instance
+        /// </summary>
+        private static readonly Extensions DEFAULT_EXTENSION = new Extensions();
+
+        /// <summary>
+        /// Creates a new <see cref="ExtendableQueryParser"/> instance
+        /// </summary>
+        /// <param name="matchVersion">the lucene version to use.</param>
+        /// <param name="f">the default query field</param>
+        /// <param name="a">the analyzer used to find terms in a query string</param>
+        public ExtendableQueryParser(LuceneVersion matchVersion, string f, Analyzer a)
+            : base(matchVersion, f, a)
+        {
+        }
+
+        /// <summary>
+        /// Creates a new <see cref="ExtendableQueryParser"/> instance
+        /// </summary>
+        /// <param name="matchVersion">the lucene version to use.</param>
+        /// <param name="f">the default query field</param>
+        /// <param name="a">the analyzer used to find terms in a query string</param>
+        /// <param name="ext">the query parser extensions</param>
+        public ExtendableQueryParser(LuceneVersion matchVersion, string f, Analyzer a, Extensions ext)
+            : base(matchVersion, f, a)
+        {
+            this.defaultField = f;
+            this.extensions = ext;
+        }
+
+        /// <summary>
+        /// Returns the extension field delimiter character.
+        /// </summary>
+        /// <returns>the extension field delimiter character.</returns>
+        public char ExtensionFieldDelimiter
+        {
+            get { return extensions.ExtensionFieldDelimiter; }
+        }
+
+        protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
+        {
+            Tuple<string, string> splitExtensionField = this.extensions
+                .SplitExtensionField(defaultField, field);
+            ParserExtension extension = this.extensions
+                .GetExtension(splitExtensionField.Item2);
+            if (extension != null)
+            {
+                return extension.Parse(new ExtensionQuery(this, splitExtensionField.Item1,
+                    queryText));
+            }
+            return base.GetFieldQuery(field, queryText, quoted);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs b/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
new file mode 100644
index 0000000..610e4ad
--- /dev/null
+++ b/Lucene.Net.QueryParser/Ext/ExtensionQuery.cs
@@ -0,0 +1,54 @@
+\ufeffnamespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// <see cref="ExtensionQuery"/> holds all query components extracted from the original
+    /// query string like the query field and the extension query string.
+    /// </summary>
+    public class ExtensionQuery
+    {
+        /// <summary>
+        /// Creates a new <see cref="ExtensionQuery"/>
+        /// </summary>
+        /// <param name="topLevelParser"></param>
+        /// <param name="field">the query field</param>
+        /// <param name="rawQueryString">the raw extension query string</param>
+        public ExtensionQuery(Classic.QueryParser topLevelParser, string field, string rawQueryString)
+        {
+            this.Field = field;
+            this.RawQueryString = rawQueryString;
+            this.TopLevelParser = topLevelParser;
+        }
+
+        /// <summary>
+        /// Returns the query field
+        /// </summary>
+        public string Field { get; protected set; }
+
+        /// <summary>
+        /// Returns the raw extension query string
+        /// </summary>
+        public string RawQueryString { get; protected set; }
+
+        /// <summary>
+        /// Returns the top level parser which created this <see cref="ExtensionQuery"/>
+        /// </summary>
+        public Classic.QueryParser TopLevelParser { get; protected set; }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.QueryParser/Ext/Extensions.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Ext/Extensions.cs b/Lucene.Net.QueryParser/Ext/Extensions.cs
new file mode 100644
index 0000000..6895268
--- /dev/null
+++ b/Lucene.Net.QueryParser/Ext/Extensions.cs
@@ -0,0 +1,167 @@
+\ufeffusing Lucene.Net.QueryParser.Classic;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    public class Extensions
+    {
+        private readonly IDictionary<string, ParserExtension> extensions = new Dictionary<string, ParserExtension>();
+        private readonly char extensionFieldDelimiter;
+
+        /// <summary>
+        /// The default extension field delimiter character. This constant is set to ':'
+        /// </summary>
+        public static readonly char DEFAULT_EXTENSION_FIELD_DELIMITER = ':';
+
+        /// <summary>
+        /// Creates a new <see cref="Extensions"/> instance with the
+        /// <see cref="#DEFAULT_EXTENSION_FIELD_DELIMITER"/> as a delimiter character.
+        /// </summary>
+        public Extensions()
+            : this(DEFAULT_EXTENSION_FIELD_DELIMITER)
+        {
+        }
+
+        /// <summary>
+        /// Creates a new <see cref="Extensions"/> instance
+        /// </summary>
+        /// <param name="extensionFieldDelimiter">the extensions field delimiter character</param>
+        public Extensions(char extensionFieldDelimiter)
+        {
+            this.extensionFieldDelimiter = extensionFieldDelimiter;
+        }
+
+        /// <summary>
+        /// Adds a new <see cref="ParserExtension"/> instance associated with the given key.
+        /// </summary>
+        /// <param name="key">the parser extension key</param>
+        /// <param name="extension">the parser extension</param>
+        public virtual void Add(string key, ParserExtension extension)
+        {
+            this.extensions[key] = extension;
+        }
+
+        /// <summary>
+        /// Returns the <see cref="ParserExtension"/> instance for the given key or
+        /// <code>null</code> if no extension can be found for the key.
+        /// </summary>
+        /// <param name="key">the extension key</param>
+        /// <returns>the <see cref="ParserExtension"/> instance for the given key or
+        /// <code>null</code> if no extension can be found for the key.</returns>
+        public ParserExtension GetExtension(string key)
+        {
+            if (key == null || !this.extensions.ContainsKey(key)) return null;
+            return this.extensions[key];
+        }
+
+        /// <summary>
+        /// Returns the extension field delimiter
+        /// </summary>
+        public virtual char ExtensionFieldDelimiter
+        {
+            get { return extensionFieldDelimiter; }
+        }
+
+        /// <summary>
+        /// Splits a extension field and returns the field / extension part as a
+        /// <see cref="Tuple{String,String}"/>. This method tries to split on the first occurrence of the
+        /// extension field delimiter, if the delimiter is not present in the string
+        /// the result will contain a <code>null</code> value for the extension key and
+        /// the given field string as the field value. If the given extension field
+        /// string contains no field identifier the result pair will carry the given
+        /// default field as the field value.
+        /// </summary>
+        /// <param name="defaultField">the default query field</param>
+        /// <param name="field">the extension field string</param>
+        /// <returns>a {<see cref="Tuple{String,String}"/> with the field name as the <see cref="Tuple{String,String}.Item1"/> and the
+        /// extension key as the <see cref="Tuple{String,String}.Item2"/></returns>
+        public Tuple<string, string> SplitExtensionField(string defaultField, string field)
+        {
+            int indexOf = field.IndexOf(this.extensionFieldDelimiter);
+            if (indexOf < 0)
+                return new Tuple<string, string>(field, null);
+            string indexField = indexOf == 0 ? defaultField : field.Substring(0, indexOf);
+            string extensionKey = field.Substring(indexOf + 1);
+            return new Tuple<string, string>(indexField, extensionKey);
+        }
+
+        /// <summary>
+        /// Escapes an extension field. The default implementation is equivalent to
+        /// <see cref="QueryParser.Escape(String)"/>.
+        /// </summary>
+        /// <param name="extfield">the extension field identifier</param>
+        /// <returns>the extension field identifier with all special chars escaped with
+        /// a backslash character.</returns>
+        public string EscapeExtensionField(string extfield)
+        {
+            return QueryParserBase.Escape(extfield);
+        }
+
+        /// <summary>
+        /// Builds an extension field string from a given extension key and the default
+        /// query field. The default field and the key are delimited with the extension
+        /// field delimiter character. This method makes no assumption about the order
+        /// of the extension key and the field. By default the extension key is
+        /// appended to the end of the returned string while the field is added to the
+        /// beginning. Special Query characters are escaped in the result.
+        /// <p>
+        /// Note: <see cref="Extensions"/> subclasses must maintain the contract between
+        /// <see cref="M:BuildExtensionField(String)"/> and
+        /// <see cref="M:BuildExtensionField(String, String)"/> where the latter inverts the
+        /// former.
+        /// </p>
+        /// </summary>
+        /// <param name="extensionKey">the extension key</param>
+        /// <returns>escaped extension field identifier</returns>
+        public string BuildExtensionField(string extensionKey)
+        {
+            return BuildExtensionField(extensionKey, "");
+        }
+
+        /// <summary>
+        /// Builds an extension field string from a given extension key and the default
+        /// query field. The default field and the key are delimited with the extension
+        /// field delimiter character. This method makes no assumption about the order
+        /// of the extension key and the field. By default the extension key is
+        /// appended to the end of the returned string while the field is added to the
+        /// beginning. Special Query characters are escaped in the result.
+        /// <p>
+        /// Note: <see cref="Extensions"/> subclasses must maintain the contract between
+        /// <see cref="M:BuildExtensionField(String)"/> and
+        /// <see cref="M:BuildExtensionField(String, String)"/> where the latter inverts the
+        /// former.
+        /// </summary>
+        /// <param name="extensionKey">the extension key</param>
+        /// <param name="field">the field to apply the extension on.</param>
+        /// <returns>escaped extension field identifier</returns>
+        /// <remarks>See <see cref="M:BuildExtensionField(String)"/> to use the default query field</remarks>
+        public string BuildExtensionField(string extensionKey, string field)
+        {
+            StringBuilder builder = new StringBuilder(field);
+            builder.Append(this.extensionFieldDelimiter);
+            builder.Append(extensionKey);
+            return EscapeExtensionField(builder.ToString());
+        }
+
+        // NOTE: Pair<T, T> was eliminated in favor of the built in Tuple<T, T> type.
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.QueryParser/Ext/ParserExtension.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Ext/ParserExtension.cs b/Lucene.Net.QueryParser/Ext/ParserExtension.cs
new file mode 100644
index 0000000..27b9212
--- /dev/null
+++ b/Lucene.Net.QueryParser/Ext/ParserExtension.cs
@@ -0,0 +1,50 @@
+\ufeffusing Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// This class represents an extension base class to the Lucene standard
+    /// <see cref="Classic.QueryParser"/>. The
+    /// <see cref="Classic.QueryParser"/> is generated by the JavaCC
+    /// parser generator. Changing or adding functionality or syntax in the standard
+    /// query parser requires changes to the JavaCC source file. To enable extending
+    /// the standard query parser without changing the JavaCC sources and re-generate
+    /// the parser the <see cref="ParserExtension"/> can be customized and plugged into an
+    /// instance of <see cref="ExtendableQueryParser"/>, a direct subclass of
+    /// <see cref="Classic.QueryParser"/>.
+    ///  
+    /// <see cref="Extensions"/>
+    /// <see cref="ExtendableQueryParser"/>
+    /// </summary>
+    public abstract class ParserExtension
+    {
+        /// <summary>
+        /// Processes the given <see cref="ExtensionQuery"/> and returns a corresponding
+        /// <see cref="Query"/> instance. Subclasses must either return a <see cref="Query"/>
+        /// instance or raise a <see cref="ParseException"/>. This method must not return
+        /// <code>null</code>.
+        /// </summary>
+        /// <param name="query">the extension query</param>
+        /// <returns>a new query instance</returns>
+        /// <exception cref="ParseException">if the query can not be parsed.</exception>
+        public abstract Query Parse(ExtensionQuery query);
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
index 0b18336..b42ed22 100644
--- a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
+++ b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
@@ -51,6 +51,10 @@
     <Compile Include="Classic\Token.cs" />
     <Compile Include="Classic\TokenMgrError.cs" />
     <Compile Include="ComplexPhrase\ComplexPhraseQueryParser.cs" />
+    <Compile Include="Ext\ExtendableQueryParser.cs" />
+    <Compile Include="Ext\ExtensionQuery.cs" />
+    <Compile Include="Ext\Extensions.cs" />
+    <Compile Include="Ext\ParserExtension.cs" />
     <Compile Include="Flexible\Standard\CommonQueryParserConfiguration.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
   </ItemGroup>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs b/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
new file mode 100644
index 0000000..cbef5d8
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
@@ -0,0 +1,30 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Minimal <see cref="ParserExtension"/> test double: turns the extension
+    /// query directly into a <see cref="TermQuery"/> built from the raw query
+    /// string, so tests can observe exactly what the parser handed over.
+    /// </summary>
+    internal class ExtensionStub : ParserExtension
+    {
+        // No analysis or further escaping happens here; the raw string becomes
+        // the term text on the target field as-is.
+        public override Query Parse(ExtensionQuery components)
+        {
+            return new TermQuery(new Term(components.Field, components.RawQueryString));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs b/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
new file mode 100644
index 0000000..7e2e99e
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
@@ -0,0 +1,145 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+using NUnit.Framework;
+using System.Globalization;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Testcase for the class <see cref="ExtendableQueryParser"/>
+    /// </summary>
+    [TestFixture]
+    public class TestExtendableQueryParser : TestQueryParser
+    {
+        // Delimiters exercised by each test: the default extension field
+        // delimiter plus two alternate characters.
+        private static char[] DELIMITERS = new char[] {
+            Extensions.DEFAULT_EXTENSION_FIELD_DELIMITER, '-', '|' };
+
+        /// <summary>
+        /// Overrides the base class factory so every test inherited from
+        /// <see cref="TestQueryParser"/> also runs against an
+        /// <see cref="ExtendableQueryParser"/> (with default extensions).
+        /// </summary>
+        public override Classic.QueryParser GetParser(Analyzer a)
+        {
+            return GetParser(a, null);
+        }
+
+        /// <summary>
+        /// Creates an <see cref="ExtendableQueryParser"/> over <c>DefaultField</c>.
+        /// A null analyzer falls back to a simple <see cref="MockAnalyzer"/>; a null
+        /// <paramref name="extensions"/> selects the parser's default extensions.
+        /// The default operator is always OR.
+        /// </summary>
+        public Classic.QueryParser GetParser(Analyzer a, Extensions extensions)
+        {
+            if (a == null)
+                a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            Classic.QueryParser qp = extensions == null ? new ExtendableQueryParser(
+                TEST_VERSION_CURRENT, DefaultField, a) : new ExtendableQueryParser(
+                TEST_VERSION_CURRENT, DefaultField, a, extensions);
+            qp.DefaultOperator = QueryParserBase.OR_OPERATOR;
+            return qp;
+        }
+
+        // An extension field delimiter left unescaped inside the query text must
+        // cause a parse failure rather than being silently accepted.
+        [Test]
+        public void TestUnescapedExtDelimiter()
+        {
+            Extensions ext = NewExtensions(':');
+            ext.Add("testExt", new ExtensionStub());
+            ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null, ext);
+            try
+            {
+                parser.Parse("aField:testExt:\"foo \\& bar\"");
+                fail("extension field delimiter is not escaped");
+            }
+            catch (ParseException e)
+            {
+                // expected: the unescaped ':' makes the query unparsable.
+                // NOTE(review): 'e' is unused; 'catch (ParseException)' would
+                // avoid compiler warning CS0168.
+            }
+        }
+
+        // Only the token attached to the extension field ("foo") is routed to the
+        // extension; the following token ("bar") is parsed normally against the
+        // default field, so the result is a two-clause BooleanQuery.
+        [Test]
+        public void TestExtFieldUnqoted()
+        {
+            for (int i = 0; i < DELIMITERS.Length; i++)
+            {
+                Extensions ext = NewExtensions(DELIMITERS[i]);
+                ext.Add("testExt", new ExtensionStub());
+                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
+                    ext);
+                string field = ext.BuildExtensionField("testExt", "aField");
+                Query query = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:foo bar", field));
+                assertTrue("expected instance of BooleanQuery but was "
+                    + query.GetType(), query is BooleanQuery);
+                BooleanQuery bquery = (BooleanQuery)query;
+                BooleanClause[] clauses = bquery.Clauses;
+                assertEquals(2, clauses.Length);
+                // first clause: the extension-produced term on the explicit field
+                BooleanClause booleanClause = clauses[0];
+                query = booleanClause.Query;
+                assertTrue("expected instance of TermQuery but was " + query.GetType(),
+                    query is TermQuery);
+                TermQuery tquery = (TermQuery)query;
+                assertEquals("aField", tquery.Term
+                    .Field);
+                assertEquals("foo", tquery.Term.Text());
+
+                // second clause: the ordinary term on the default field
+                booleanClause = clauses[1];
+                query = booleanClause.Query;
+                assertTrue("expected instance of TermQuery but was " + query.GetType(),
+                    query is TermQuery);
+                tquery = (TermQuery)query;
+                assertEquals(DefaultField, tquery.Term.Field);
+                assertEquals("bar", tquery.Term.Text());
+            }
+        }
+
+        // An extension field with no explicit target field must resolve to the
+        // parser's default field; the resulting term text is the unescaped
+        // string ("foo & bar").
+        [Test]
+        public void TestExtDefaultField()
+        {
+            for (int i = 0; i < DELIMITERS.Length; i++)
+            {
+                Extensions ext = NewExtensions(DELIMITERS[i]);
+                ext.Add("testExt", new ExtensionStub());
+                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
+                    ext);
+                string field = ext.BuildExtensionField("testExt");
+                Query parse = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:\"foo \\& bar\"", field));
+                assertTrue("expected instance of TermQuery but was " + parse.GetType(),
+                    parse is TermQuery);
+                TermQuery tquery = (TermQuery)parse;
+                assertEquals(DefaultField, tquery.Term.Field);
+                assertEquals("foo & bar", tquery.Term.Text());
+            }
+        }
+
+        /// <summary>Factory for an <see cref="Extensions"/> instance with the given delimiter.</summary>
+        public Extensions NewExtensions(char delimiter)
+        {
+            return new Extensions(delimiter);
+        }
+
+        // Same as TestExtDefaultField, but with an explicit target field
+        // ("afield") encoded into the extension field.
+        [Test]
+        public void TestExtField()
+        {
+            for (int i = 0; i < DELIMITERS.Length; i++)
+            {
+                Extensions ext = NewExtensions(DELIMITERS[i]);
+                ext.Add("testExt", new ExtensionStub());
+                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
+                    ext);
+                string field = ext.BuildExtensionField("testExt", "afield");
+                Query parse = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:\"foo \\& bar\"", field));
+                assertTrue("expected instance of TermQuery but was " + parse.GetType(),
+                    parse is TermQuery);
+                TermQuery tquery = (TermQuery)parse;
+                assertEquals("afield", tquery.Term.Field);
+                assertEquals("foo & bar", tquery.Term.Text());
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs b/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
new file mode 100644
index 0000000..4850987
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
@@ -0,0 +1,97 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Testcase for the <see cref="Extensions"/> class
+    /// </summary>
+    [TestFixture]
+    public class TestExtensions : LuceneTestCase
+    {
+        // Fresh default-delimiter Extensions instance per test (see SetUp);
+        // individual tests may replace it with a custom-delimiter instance.
+        private Extensions ext;
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            this.ext = new Extensions();
+        }
+
+        // BuildExtensionField must escape the default ':' delimiter, while a
+        // custom delimiter ('.') is emitted verbatim.
+        [Test]
+        public void TestBuildExtensionField()
+        {
+            assertEquals("field\\:key", ext.BuildExtensionField("key", "field"));
+            assertEquals("\\:key", ext.BuildExtensionField("key"));
+
+            ext = new Extensions('.');
+            assertEquals("field.key", ext.BuildExtensionField("key", "field"));
+            assertEquals(".key", ext.BuildExtensionField("key"));
+        }
+
+        // NOTE(review): this test is byte-for-byte a copy of
+        // TestBuildExtensionField and never calls Extensions.SplitExtensionField,
+        // so the split path is untested — presumably a copy/paste slip in the
+        // port; confirm against the upstream Java testSplitExtensionField and
+        // port the real assertions.
+        [Test]
+        public void TestSplitExtensionField()
+        {
+            assertEquals("field\\:key", ext.BuildExtensionField("key", "field"));
+            assertEquals("\\:key", ext.BuildExtensionField("key"));
+            
+            ext = new Extensions('.');
+            assertEquals("field.key", ext.BuildExtensionField("key", "field"));
+            assertEquals(".key", ext.BuildExtensionField("key"));
+        }
+
+        // Registering an extension makes it retrievable under its key; adding
+        // null for the same key must clear the registration again.
+        [Test]
+        public void TestAddGetExtension()
+        {
+            ParserExtension extension = new ExtensionStub();
+            assertNull(ext.GetExtension("foo"));
+            ext.Add("foo", extension);
+            Assert.AreSame(extension, ext.GetExtension("foo"));
+            ext.Add("foo", null);
+            assertNull(ext.GetExtension("foo"));
+        }
+
+        // The delimiter passed to the constructor must be reported back by
+        // ExtensionFieldDelimiter ('ext' is the instance field, so 'this.ext'
+        // below observes the reassignment).
+        [Test]
+        public void TestGetExtDelimiter()
+        {
+            assertEquals(Extensions.DEFAULT_EXTENSION_FIELD_DELIMITER, this.ext
+                .ExtensionFieldDelimiter);
+            ext = new Extensions('?');
+            assertEquals('?', this.ext.ExtensionFieldDelimiter);
+        }
+
+        // Every special character must be backslash-escaped, and a null input
+        // must throw. The broad Exception catch stands in for Java's
+        // NullPointerException check (see the commented-out line below).
+        [Test]
+        public void TestEscapeExtension()
+        {
+            assertEquals("abc\\:\\?\\{\\}\\[\\]\\\\\\(\\)\\+\\-\\!\\~", ext
+                .EscapeExtensionField("abc:?{}[]\\()+-!~"));
+            try
+            {
+                ext.EscapeExtensionField(null);
+                fail("should throw NPE - escape string is null");
+            }
+            //catch (NullPointerException e)
+            catch (Exception e)
+            {
+                // expected: a null escape string is rejected.
+                // NOTE(review): 'e' is unused (compiler warning CS0168).
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
index b263dc8..adab182 100644
--- a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
+++ b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
@@ -48,6 +48,9 @@
     <Compile Include="Classic\TestMultiPhraseQueryParsing.cs" />
     <Compile Include="Classic\TestQueryParser.cs" />
     <Compile Include="ComplexPhrase\TestComplexPhraseQuery.cs" />
+    <Compile Include="Ext\ExtensionStub.cs" />
+    <Compile Include="Ext\TestExtendableQueryParser.cs" />
+    <Compile Include="Ext\TestExtensions.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
     <Compile Include="Classic\TestMultiAnalyzer.cs" />
     <Compile Include="Util\QueryParserTestBase.cs" />

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/e45f3289/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
index bcf8792..b879008 100644
--- a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
+++ b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
@@ -114,7 +114,7 @@ namespace Lucene.Net.QueryParser.Util
         }
 
         // Moved from TestQueryParser
-        public Classic.QueryParser GetParser(Analyzer a)
+        public virtual Classic.QueryParser GetParser(Analyzer a)
         {
             if (a == null) a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
             Classic.QueryParser qp = new Classic.QueryParser(TEST_VERSION_CURRENT, DefaultField, a);


[14/50] [abbrv] lucenenet git commit: Ported QueryParser.Simple namespace + tests.

Posted by sy...@apache.org.
Ported QueryParser.Simple namespace + tests.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/6224f3e2
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/6224f3e2
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/6224f3e2

Branch: refs/heads/master
Commit: 6224f3e295c61defce4a5c6dfd5dd9458b030777
Parents: e45f328
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Mon Aug 1 15:19:34 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:34 2016 +0700

----------------------------------------------------------------------
 .../Lucene.Net.QueryParser.csproj               |   1 +
 .../Simple/SimpleQueryParser.cs                 | 788 +++++++++++++++++++
 .../Lucene.Net.Tests.QueryParser.csproj         |   1 +
 .../Simple/TestSimpleQueryParser.cs             | 728 +++++++++++++++++
 4 files changed, 1518 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6224f3e2/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
index b42ed22..646e931 100644
--- a/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
+++ b/Lucene.Net.QueryParser/Lucene.Net.QueryParser.csproj
@@ -57,6 +57,7 @@
     <Compile Include="Ext\ParserExtension.cs" />
     <Compile Include="Flexible\Standard\CommonQueryParserConfiguration.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="Simple\SimpleQueryParser.cs" />
   </ItemGroup>
   <ItemGroup>
     <ProjectReference Include="..\src\Lucene.Net.Analysis.Common\Lucene.Net.Analysis.Common.csproj">

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6224f3e2/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs b/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
new file mode 100644
index 0000000..8607d27
--- /dev/null
+++ b/Lucene.Net.QueryParser/Simple/SimpleQueryParser.cs
@@ -0,0 +1,788 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using Lucene.Net.Util.Automaton;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.Simple
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// SimpleQueryParser is used to parse human readable query syntax.
+    /// <p>
+    /// The main idea behind this parser is that a person should be able to type
+    /// whatever they want to represent a query, and this parser will do its best
+    /// to interpret what to search for no matter how poorly composed the request
+    /// may be. Tokens are considered to be any of a term, phrase, or subquery for the
+    /// operations described below.  Whitespace including ' ' '\n' '\r' and '\t'
+    /// and certain operators may be used to delimit tokens ( ) + | " .
+    /// <p>
+    /// Any errors in query syntax will be ignored and the parser will attempt
+    /// to decipher what it can; however, this may mean odd or unexpected results.
+    /// <h4>Query Operators</h4>
+    /// <ul>
+    ///  <li>'{@code +}' specifies {@code AND} operation: <tt>token1+token2</tt>
+    ///  <li>'{@code |}' specifies {@code OR} operation: <tt>token1|token2</tt>
+    ///  <li>'{@code -}' negates a single token: <tt>-token0</tt>
+    ///  <li>'{@code "}' creates phrases of terms: <tt>"term1 term2 ..."</tt>
+    ///  <li>'{@code *}' at the end of terms specifies prefix query: <tt>term*</tt>
+    ///  <li>'{@code ~}N' at the end of terms specifies fuzzy query: <tt>term~1</tt>
+    ///  <li>'{@code ~}N' at the end of phrases specifies near query: <tt>"term1 term2"~5</tt>
+    ///  <li>'{@code (}' and '{@code )}' specifies precedence: <tt>token1 + (token2 | token3)</tt>
+    /// </ul>
+    /// <p>
+    /// The {@link #setDefaultOperator default operator} is {@code OR} if no other operator is specified.
+    /// For example, the following will {@code OR} {@code token1} and {@code token2} together:
+    /// <tt>token1 token2</tt>
+    /// <p>
+    /// Normal operator precedence will be simple order from right to left.
+    /// For example, the following will evaluate {@code token1 OR token2} first,
+    /// then {@code AND} with {@code token3}:
+    /// <blockquote>token1 | token2 + token3</blockquote>
+    /// <h4>Escaping</h4>
+    /// <p>
+    /// An individual term may contain any possible character with certain characters
+    /// requiring escaping using a '{@code \}'.  The following characters will need to be escaped in
+    /// terms and phrases:
+    /// {@code + | " ( ) ' \}
+    /// <p>
+    /// The '{@code -}' operator is a special case.  On individual terms (not phrases) the first
+    /// character of a term that is {@code -} must be escaped; however, any '{@code -}' characters
+    /// beyond the first character do not need to be escaped.
+    /// For example:
+    /// <ul>
+    ///   <li>{@code -term1}   -- Specifies {@code NOT} operation against {@code term1}
+    ///   <li>{@code \-term1}  -- Searches for the term {@code -term1}.
+    ///   <li>{@code term-1}   -- Searches for the term {@code term-1}.
+    ///   <li>{@code term\-1}  -- Searches for the term {@code term-1}.
+    /// </ul>
+    /// <p>
+    /// The '{@code *}' operator is a special case. On individual terms (not phrases) the last
+    /// character of a term that is '{@code *}' must be escaped; however, any '{@code *}' characters
+    /// before the last character do not need to be escaped:
+    /// <ul>
+    ///   <li>{@code term1*}  --  Searches for the prefix {@code term1}
+    ///   <li>{@code term1\*} --  Searches for the term {@code term1*}
+    ///   <li>{@code term*1}  --  Searches for the term {@code term*1}
+    ///   <li>{@code term\*1} --  Searches for the term {@code term*1}
+    /// </ul>
+    /// <p>
+    /// Note that above examples consider the terms before text processing.
+    /// </summary>
+    public class SimpleQueryParser : QueryBuilder
+    {
+        /** Map of fields to query against with their weights */
+        protected readonly IDictionary<string, float> weights;
+
+        // TODO: Make these into a [Flags] enum??
+        /** flags to the parser (to turn features on/off) */
+        protected readonly int flags;
+
+        /** Enables {@code AND} operator (+) */
+        public static readonly int AND_OPERATOR         = 1<<0;
+        /** Enables {@code NOT} operator (-) */
+        public static readonly int NOT_OPERATOR         = 1<<1;
+        /** Enables {@code OR} operator (|) */
+        public static readonly int OR_OPERATOR          = 1<<2;
+        /** Enables {@code PREFIX} operator (*) */
+        public static readonly int PREFIX_OPERATOR      = 1<<3;
+        /** Enables {@code PHRASE} operator (") */
+        public static readonly int PHRASE_OPERATOR      = 1<<4;
+        /** Enables {@code PRECEDENCE} operators: {@code (} and {@code )} */
+        public static readonly int PRECEDENCE_OPERATORS = 1<<5;
+        /** Enables {@code ESCAPE} operator (\) */
+        public static readonly int ESCAPE_OPERATOR      = 1<<6;
+        /** Enables {@code WHITESPACE} operators: ' ' '\n' '\r' '\t' */
+        public static readonly int WHITESPACE_OPERATOR  = 1<<7;
+        /** Enables {@code FUZZY} operators: (~) on single terms */
+        public static readonly int FUZZY_OPERATOR       = 1<<8;
+        /** Enables {@code NEAR} operators: (~) on phrases */
+        public static readonly int NEAR_OPERATOR        = 1<<9;
+
+        private BooleanClause.Occur defaultOperator = BooleanClause.Occur.SHOULD;
+
+        /// <summary>
+        /// Creates a new parser searching over a single field.
+        /// </summary>
+        /// <param name="analyzer"></param>
+        /// <param name="field"></param>
+        public SimpleQueryParser(Analyzer analyzer, string field)
+            : this(analyzer, new HashMap<string, float>() { { field, 1.0F } })
+        {
+        }
+
+        /// <summary>
+        /// Creates a new parser searching over multiple fields with different weights.
+        /// </summary>
+        /// <param name="analyzer"></param>
+        /// <param name="weights"></param>
+        public SimpleQueryParser(Analyzer analyzer, IDictionary<string, float> weights)
+            : this(analyzer, weights, -1)
+        {
+        }
+
+        /// <summary>
+        /// Creates a new parser with custom flags used to enable/disable certain features.
+        /// </summary>
+        /// <param name="analyzer"></param>
+        /// <param name="weights"></param>
+        /// <param name="flags"></param>
+        public SimpleQueryParser(Analyzer analyzer, IDictionary<string, float> weights, int flags)
+            : base(analyzer)
+        {
+            this.weights = weights;
+            this.flags = flags;
+        }
+
+        /// <summary>
+        /// Parses the query text and returns parsed query (or null if empty)
+        /// </summary>
+        /// <param name="queryText"></param>
+        /// <returns></returns>
+        public Query Parse(string queryText)
+        {
+            char[] data = queryText.ToCharArray();
+            char[] buffer = new char[data.Length];
+
+            State state = new State(data, buffer, 0, data.Length);
+            ParseSubQuery(state);
+            return state.Top;
+        }
+
+        private void ParseSubQuery(State state)
+        {
+            while (state.Index < state.Length)
+            {
+                if (state.Data[state.Index] == '(' && (flags & PRECEDENCE_OPERATORS) != 0)
+                {
+                    // the beginning of a subquery has been found
+                    ConsumeSubQuery(state);
+                }
+                else if (state.Data[state.Index] == ')' && (flags & PRECEDENCE_OPERATORS) != 0)
+                {
+                    // this is an extraneous character so it is ignored
+                    ++state.Index;
+                }
+                else if (state.Data[state.Index] == '"' && (flags & PHRASE_OPERATOR) != 0)
+                {
+                    // the beginning of a phrase has been found
+                    ConsumePhrase(state);
+                }
+                else if (state.Data[state.Index] == '+' && (flags & AND_OPERATOR) != 0)
+                {
+                    // an and operation has been explicitly set
+                    // if an operation has already been set this one is ignored
+                    // if a term (or phrase or subquery) has not been found yet the
+                    // operation is also ignored since there is no previous
+                    // term (or phrase or subquery) to and with
+                    if (!state.CurrentOperationIsSet && state.Top != null)
+                    {
+                        state.CurrentOperation = BooleanClause.Occur.MUST;
+                    }
+
+                    ++state.Index;
+                }
+                else if (state.Data[state.Index] == '|' && (flags & OR_OPERATOR) != 0)
+                {
+                    // an or operation has been explicitly set
+                    // if an operation has already been set this one is ignored
+                    // if a term (or phrase or subquery) has not been found yet the
+                    // operation is also ignored since there is no previous
+                    // term (or phrase or subquery) to or with
+                    if (!state.CurrentOperationIsSet && state.Top != null)
+                    {
+                        state.CurrentOperation = BooleanClause.Occur.SHOULD;
+                    }
+
+                    ++state.Index;
+                }
+                else if (state.Data[state.Index] == '-' && (flags & NOT_OPERATOR) != 0)
+                {
+                    // a not operator has been found, so increase the not count
+                    // two not operators in a row negate each other
+                    ++state.Not;
+                    ++state.Index;
+
+                    // continue so the not operator is not reset
+                    // before the next character is determined
+                    continue;
+                }
+                else if ((state.Data[state.Index] == ' '
+                  || state.Data[state.Index] == '\t'
+                  || state.Data[state.Index] == '\n'
+                  || state.Data[state.Index] == '\r') && (flags & WHITESPACE_OPERATOR) != 0)
+                {
+                    // ignore any whitespace found as it may have already been
+                    // used as a delimiter across a term (or phrase or subquery)
+                    // or is simply extraneous
+                    ++state.Index;
+                }
+                else
+                {
+                    // the beginning of a token has been found
+                    ConsumeToken(state);
+                }
+
+                // reset the not operator as even whitespace is not allowed when
+                // specifying the not operation for a term (or phrase or subquery)
+                state.Not = 0;
+            }
+        }
+
+        private void ConsumeSubQuery(State state)
+        {
+            Debug.Assert((flags & PRECEDENCE_OPERATORS) != 0);
+            int start = ++state.Index;
+            int precedence = 1;
+            bool escaped = false;
+
+            while (state.Index < state.Length)
+            {
+                if (!escaped)
+                {
+                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
+                    {
+                        // an escape character has been found so
+                        // whatever character is next will become
+                        // part of the subquery unless the escape
+                        // character is the last one in the data
+                        escaped = true;
+                        ++state.Index;
+
+                        continue;
+                    }
+                    else if (state.Data[state.Index] == '(')
+                    {
+                        // increase the precedence as there is a
+                        // subquery in the current subquery
+                        ++precedence;
+                    }
+                    else if (state.Data[state.Index] == ')')
+                    {
+                        --precedence;
+
+                        if (precedence == 0)
+                        {
+                            // this should be the end of the subquery
+                            // all characters found will be used for
+                            // creating the subquery
+                            break;
+                        }
+                    }
+                }
+
+                escaped = false;
+                ++state.Index;
+            }
+
+            if (state.Index == state.Length)
+            {
+                // a closing parenthesis was never found so the opening
+                // parenthesis is considered extraneous and will be ignored
+                state.Index = start;
+            }
+            else if (state.Index == start)
+            {
+                // a closing parenthesis was found immediately after the opening
+                // parenthesis so the current operation is reset since it would
+                // have been applied to this subquery
+                state.CurrentOperationIsSet = false;
+
+                ++state.Index;
+            }
+            else
+            {
+                // a complete subquery has been found and is recursively parsed by
+                // starting over with a new state object
+                State subState = new State(state.Data, state.Buffer, start, state.Index);
+                ParseSubQuery(subState);
+                BuildQueryTree(state, subState.Top);
+
+                ++state.Index;
+            }
+        }
+
        /// <summary>
        /// Consumes a quoted phrase. On entry <c>state.Index</c> is positioned on the
        /// opening '"'; characters are copied into <c>state.Buffer</c> until the closing
        /// '"', honoring '\' escapes (when enabled) and an optional trailing '~slop'
        /// (NEAR operator). On exit <c>state.Index</c> has been advanced past the phrase.
        /// </summary>
        /// <param name="state">current parse state; Index and the query tree are mutated</param>
        private void ConsumePhrase(State state)
        {
            Debug.Assert((flags & PHRASE_OPERATOR) != 0);
            int start = ++state.Index;
            int copied = 0;
            bool escaped = false;
            bool hasSlop = false;

            while (state.Index < state.Length)
            {
                if (!escaped)
                {
                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
                    {
                        // an escape character has been found so
                        // whatever character is next will become
                        // part of the phrase unless the escape
                        // character is the last one in the data
                        escaped = true;
                        ++state.Index;

                        continue;
                    }
                    else if (state.Data[state.Index] == '"')
                    {
                        // if there are still characters after the closing ", check for a
                        // tilde (the NEAR/slop operator)
                        if (state.Length > (state.Index + 1) &&
                            state.Data[state.Index + 1] == '~' &&
                            (flags & NEAR_OPERATOR) != 0)
                        {
                            state.Index++;
                            // only treat the tilde as slop if characters follow it
                            if (state.Length > (state.Index + 1))
                            {
                                hasSlop = true;
                            }
                            break;
                        }
                        else
                        {
                            // this should be the end of the phrase;
                            // all characters found will be used for
                            // creating the phrase query
                            break;
                        }
                    }
                }

                escaped = false;
                state.Buffer[copied++] = state.Data[state.Index++];
            }

            if (state.Index == state.Length)
            {
                // a closing double quote was never found so the opening
                // double quote is considered extraneous and will be ignored
                state.Index = start;
            }
            else if (state.Index == start)
            {
                // a closing double quote was found immediately after the opening
                // double quote so the current operation is reset since it would
                // have been applied to this phrase
                state.CurrentOperationIsSet = false;

                ++state.Index;
            }
            else
            {
                // a complete phrase has been found and is parsed
                // through the analyzer from the given field
                string phrase = new string(state.Buffer, 0, copied);
                Query branch;
                if (hasSlop)
                {
                    branch = NewPhraseQuery(phrase, ParseFuzziness(state));
                }
                else
                {
                    branch = NewPhraseQuery(phrase, 0);
                }
                BuildQueryTree(state, branch);

                ++state.Index;
            }
        }
+
        /// <summary>
        /// Consumes a single term starting at <c>state.Index</c>, copying characters
        /// into <c>state.Buffer</c> until an operator or whitespace ends the token.
        /// Depending on trailing operators the term becomes a fuzzy query ('~'),
        /// a prefix query (trailing '*'), or a default term query.
        /// </summary>
        /// <param name="state">current parse state; Index and the query tree are mutated</param>
        private void ConsumeToken(State state)
        {
            int copied = 0;
            bool escaped = false;
            bool prefix = false;
            bool fuzzy = false;

            while (state.Index < state.Length)
            {
                if (!escaped)
                {
                    if (state.Data[state.Index] == '\\' && (flags & ESCAPE_OPERATOR) != 0)
                    {
                        // an escape character has been found so
                        // whatever character is next will become
                        // part of the term unless the escape
                        // character is the last one in the data
                        escaped = true;
                        prefix = false;
                        ++state.Index;

                        continue;
                    }
                    else if (TokenFinished(state))
                    {
                        // this should be the end of the term;
                        // all characters found will be used for
                        // creating the term query
                        break;
                    }
                    else if (copied > 0 && state.Data[state.Index] == '~' && (flags & FUZZY_OPERATOR) != 0)
                    {
                        fuzzy = true;
                        break;
                    }

                    // prefix tracks whether or not the last character
                    // was a '*' operator that hasn't been escaped;
                    // there must be at least one valid character before
                    // searching for a prefixed set of terms
                    prefix = copied > 0 && state.Data[state.Index] == '*' && (flags & PREFIX_OPERATOR) != 0;
                }

                escaped = false;
                state.Buffer[copied++] = state.Data[state.Index++];
            }

            if (copied > 0)
            {
                Query branch;

                if (fuzzy && (flags & FUZZY_OPERATOR) != 0)
                {
                    string token = new string(state.Buffer, 0, copied);
                    int fuzziness = ParseFuzziness(state);
                    // edit distance has a maximum, limit to the maximum supported
                    fuzziness = Math.Min(fuzziness, LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
                    if (fuzziness == 0)
                    {
                        // a fuzziness of 0 degenerates to an exact term
                        branch = NewDefaultQuery(token);
                    }
                    else
                    {
                        branch = NewFuzzyQuery(token, fuzziness);
                    }
                }
                else if (prefix)
                {
                    // if a term is found with a closing '*' it is considered to be a prefix query;
                    // the '*' itself (the last copied character) is dropped from the term text
                    string token = new string(state.Buffer, 0, copied - 1);
                    branch = NewPrefixQuery(token);
                }
                else
                {
                    // a standard term has been found so it will be run through
                    // the entire analysis chain from the specified schema field
                    string token = new string(state.Buffer, 0, copied);
                    branch = NewDefaultQuery(token);
                }

                BuildQueryTree(state, branch);
            }
        }
+
        /// <summary>
        /// BuildQueryTree should be called after a term, phrase, or subquery
        /// is consumed, to attach it to the query tree being built.
        /// The tree is only modified when <paramref name="branch"/> is non-null.
        /// </summary>
        /// <param name="state">current parse state; Top, Not, and the operation flags are read and mutated</param>
        /// <param name="branch">the newly consumed query, or null when nothing was produced</param>
        private void BuildQueryTree(State state, Query branch)
        {
            if (branch != null)
            {
                // modify our branch to a BooleanQuery wrapper for not
                // this is necessary any time a term, phrase, or subquery is negated
                // (an odd count of '-' operators means the branch is negated)
                if (state.Not % 2 == 1)
                {
                    BooleanQuery nq = new BooleanQuery();
                    nq.Add(branch, BooleanClause.Occur.MUST_NOT);
                    nq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
                    branch = nq;
                }

                // first term (or phrase or subquery) found and will begin our query tree
                if (state.Top == null)
                {
                    state.Top = branch;
                }
                else
                {
                    // more than one term (or phrase or subquery) found
                    // set currentOperation to the default if no other operation is explicitly set
                    if (!state.CurrentOperationIsSet)
                    {
                        state.CurrentOperation = defaultOperator;
                    }

                    // operational change requiring a new parent node
                    // this occurs if the previous operation is not the same as current operation
                    // because the previous operation must be evaluated separately to preserve
                    // the proper precedence and the current operation will take over as the top of the tree
                    if (!state.PreviousOperationIsSet || state.PreviousOperation != state.CurrentOperation)
                    {
                        BooleanQuery bq = new BooleanQuery();
                        bq.Add(state.Top, state.CurrentOperation);
                        state.Top = bq;
                    }

                    // append the new branch under the current operation and
                    // remember that operation for the next call
                    ((BooleanQuery)state.Top).Add(branch, state.CurrentOperation);
                    state.PreviousOperation = state.CurrentOperation;
                }

                // reset the current operation for reuse; it was intended to apply
                // only to the branch that was just attached
                state.CurrentOperationIsSet = false;
            }
        }
+
+        /// <summary>
+        /// Helper parsing fuzziness from parsing state
+        /// </summary>
+        /// <param name="state"></param>
+        /// <returns>slop/edit distance, 0 in the case of non-parsing slop/edit string</returns>
+        private int ParseFuzziness(State state)
+        {
+            char[] slopText = new char[state.Length];
+            int slopLength = 0;
+
+            if (state.Data[state.Index] == '~')
+            {
+                while (state.Index < state.Length)
+                {
+                    state.Index++;
+                    // it's possible that the ~ was at the end, so check after incrementing
+                    // to make sure we don't go out of bounds
+                    if (state.Index < state.Length)
+                    {
+                        if (TokenFinished(state))
+                        {
+                            break;
+                        }
+                        slopText[slopLength] = state.Data[state.Index];
+                        slopLength++;
+                    }
+                }
+                int fuzziness = 0;
+                int.TryParse(new string(slopText, 0, slopLength), out fuzziness);
+                // negative -> 0
+                if (fuzziness < 0)
+                {
+                    fuzziness = 0;
+                }
+                return fuzziness;
+            }
+            return 0;
+        }
+
+        /// <summary>
+        /// Helper returning true if the state has reached the end of token.
+        /// </summary>
+        /// <param name="state"></param>
+        /// <returns></returns>
+        private bool TokenFinished(State state)
+        {
+            if ((state.Data[state.Index] == '"' && (flags & PHRASE_OPERATOR) != 0)
+                || (state.Data[state.Index] == '|' && (flags & OR_OPERATOR) != 0)
+                || (state.Data[state.Index] == '+' && (flags & AND_OPERATOR) != 0)
+                || (state.Data[state.Index] == '(' && (flags & PRECEDENCE_OPERATORS) != 0)
+                || (state.Data[state.Index] == ')' && (flags & PRECEDENCE_OPERATORS) != 0)
+                || ((state.Data[state.Index] == ' '
+                || state.Data[state.Index] == '\t'
+                || state.Data[state.Index] == '\n'
+                || state.Data[state.Index] == '\r') && (flags & WHITESPACE_OPERATOR) != 0))
+            {
+                return true;
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Factory method to generate a standard query (no phrase or prefix operators).
+        /// </summary>
+        /// <param name="text"></param>
+        /// <returns></returns>
+        protected virtual Query NewDefaultQuery(string text)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            foreach (var entry in weights)
+            {
+                Query q = CreateBooleanQuery(entry.Key, text, defaultOperator);
+                if (q != null)
+                {
+                    q.Boost = entry.Value;
+                    bq.Add(q, BooleanClause.Occur.SHOULD);
+                }
+            }
+            return Simplify(bq);
+        }
+
+        /// <summary>
+        /// Factory method to generate a fuzzy query.
+        /// </summary>
+        /// <param name="text"></param>
+        /// <param name="fuzziness"></param>
+        /// <returns></returns>
+        protected virtual Query NewFuzzyQuery(string text, int fuzziness)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            foreach (var entry in weights)
+            {
+                Query q = new FuzzyQuery(new Term(entry.Key, text), fuzziness);
+                if (q != null)
+                {
+                    q.Boost = entry.Value;
+                    bq.Add(q, BooleanClause.Occur.SHOULD);
+                }
+            }
+            return Simplify(bq);
+        }
+
+        /// <summary>
+        /// Factory method to generate a phrase query with slop.
+        /// </summary>
+        /// <param name="text"></param>
+        /// <param name="slop"></param>
+        /// <returns></returns>
+        protected virtual Query NewPhraseQuery(string text, int slop)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            foreach (var entry in weights)
+            {
+                Query q = CreatePhraseQuery(entry.Key, text, slop);
+                if (q != null)
+                {
+                    q.Boost = entry.Value;
+                    bq.Add(q, BooleanClause.Occur.SHOULD);
+                }
+            }
+            return Simplify(bq);
+        }
+
+        /// <summary>
+        /// Factory method to generate a prefix query.
+        /// </summary>
+        /// <param name="text"></param>
+        /// <returns></returns>
+        protected virtual Query NewPrefixQuery(string text)
+        {
+            BooleanQuery bq = new BooleanQuery(true);
+            foreach (var entry in weights)
+            {
+                PrefixQuery prefix = new PrefixQuery(new Term(entry.Key, text));
+                prefix.Boost = entry.Value;
+                bq.Add(prefix, BooleanClause.Occur.SHOULD);
+            }
+            return Simplify(bq);
+        }
+
+        /// <summary>
+        /// Helper to simplify boolean queries with 0 or 1 clause
+        /// </summary>
+        /// <param name="bq"></param>
+        /// <returns></returns>
+        protected virtual Query Simplify(BooleanQuery bq)
+        {
+            if (!bq.Clauses.Any())
+            {
+                return null;
+            }
+            else if (bq.Clauses.Length == 1)
+            {
+                return bq.Clauses[0].Query;
+            }
+            else
+            {
+                return bq;
+            }
+        }
+
        /// <summary>
        /// Gets or Sets the implicit operator setting, which will be
        /// either <see cref="BooleanClause.Occur.SHOULD"/> (OR) or
        /// <see cref="BooleanClause.Occur.MUST"/> (AND), applied between
        /// clauses that have no explicit operator.
        /// </summary>
        public virtual BooleanClause.Occur DefaultOperator
        {
            get { return defaultOperator; }
            set { defaultOperator = value; }
        }
+
+
+        public class State
+        {
+            //private readonly char[] data;   // the characters in the query string
+            //private readonly char[] buffer; // a temporary buffer used to reduce necessary allocations
+            //private int index;
+            //private int length;
+
+            private BooleanClause.Occur currentOperation;
+            private BooleanClause.Occur previousOperation;
+            //private int not;
+
+            //private Query top;
+
+            internal State(char[] data, char[] buffer, int index, int length)
+            {
+                this.Data = data;
+                this.Buffer = buffer;
+                this.Index = index;
+                this.Length = length;
+            }
+
+            public char[] Data { get; protected set; } // the characters in the query string
+            public char[] Buffer { get; protected set; } // a temporary buffer used to reduce necessary allocations
+            public int Index { get; set; }
+            public int Length { get; protected set; }
+
+            public BooleanClause.Occur CurrentOperation 
+            {
+                get 
+                { 
+                    return currentOperation; 
+                }
+                set
+                {
+                    currentOperation = value;
+                    CurrentOperationIsSet = true;
+                }
+            }
+
+            public BooleanClause.Occur PreviousOperation
+            {
+                get
+                {
+                    return previousOperation;
+                }
+                set
+                {
+                    previousOperation = value;
+                    PreviousOperationIsSet = true;
+                }
+            }
+
+            public bool CurrentOperationIsSet { get; set; }
+            public bool PreviousOperationIsSet { get; set; }
+
+            public int Not { get; set; }
+            public Query Top { get; set; }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6224f3e2/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
index adab182..bcf9568 100644
--- a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
+++ b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
@@ -53,6 +53,7 @@
     <Compile Include="Ext\TestExtensions.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
     <Compile Include="Classic\TestMultiAnalyzer.cs" />
+    <Compile Include="Simple\TestSimpleQueryParser.cs" />
     <Compile Include="Util\QueryParserTestBase.cs" />
   </ItemGroup>
   <ItemGroup>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/6224f3e2/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs b/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
new file mode 100644
index 0000000..0a9d49f
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Simple/TestSimpleQueryParser.cs
@@ -0,0 +1,728 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using Lucene.Net.Util.Automaton;
+using NUnit.Framework;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Simple
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Tests for <see cref="SimpleQueryParser"/>
+    /// </summary>
+    [TestFixture]
+    public class TestSimpleQueryParser : LuceneTestCase
+    {
        /// <summary>
        /// helper to parse a query with whitespace+lowercase analyzer across "field",
        /// with default operator of MUST
        /// </summary>
        /// <param name="text">the raw query text to parse</param>
        /// <returns>the parsed <see cref="Query"/>; null for empty/garbage-only input</returns>
        private Query Parse(string text)
        {
            Analyzer analyzer = new MockAnalyzer(Random());
            SimpleQueryParser parser = new SimpleQueryParser(analyzer, "field");
            parser.DefaultOperator = BooleanClause.Occur.MUST;
            return parser.Parse(text);
        }
+
        /// <summary>
        /// helper to parse a query with whitespace+lowercase analyzer across "field",
        /// with default operator of MUST, enabling only the given operator flags
        /// </summary>
        /// <param name="text">the raw query text to parse</param>
        /// <param name="flags">bitmask of the parser operators to enable</param>
        /// <returns>the parsed <see cref="Query"/>; null for empty/garbage-only input</returns>
        private Query Parse(string text, int flags)
        {
            Analyzer analyzer = new MockAnalyzer(Random());
            SimpleQueryParser parser = new SimpleQueryParser(analyzer, new HashMap<string, float>() { { "field", 1f } }, flags);
            parser.DefaultOperator = BooleanClause.Occur.MUST;
            return parser.Parse(text);
        }
+
+        /** test a simple term */
+        [Test]
+        public void TestTerm()
+        {
+            Query expected = new TermQuery(new Term("field", "foobar"));
+
+            assertEquals(expected, Parse("foobar"));
+        }
+
        /// <summary>test a fuzzy query</summary>
        [Test]
        public void TestFuzzy()
        {
            Query regular = new TermQuery(new Term("field", "foobar"));
            Query expected = new FuzzyQuery(new Term("field", "foobar"), 2);

            assertEquals(expected, Parse("foobar~2"));
            // a bare or non-numeric edit distance is ignored, leaving a plain term
            assertEquals(regular, Parse("foobar~"));
            assertEquals(regular, Parse("foobar~a"));
            assertEquals(regular, Parse("foobar~1a"));

            BooleanQuery @bool = new BooleanQuery();
            FuzzyQuery fuzzy = new FuzzyQuery(new Term("field", "foo"), LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
            @bool.Add(fuzzy, BooleanClause.Occur.MUST);
            @bool.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.MUST);

            // note: the "+ 1" below is string-concatenated, not added, deliberately
            // producing an over-limit distance (digits of the max followed by '1')
            // which the parser clamps down to MAXIMUM_SUPPORTED_DISTANCE
            assertEquals(@bool, Parse("foo~" + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE + 1 + " bar"));
        }
+
+        /** test a simple phrase */
+        [Test]
+        public void TestPhrase()
+        {
+            PhraseQuery expected = new PhraseQuery();
+            expected.Add(new Term("field", "foo"));
+            expected.Add(new Term("field", "bar"));
+
+            assertEquals(expected, Parse("\"foo bar\""));
+        }
+
        /// <summary>test a simple phrase with various slop settings</summary>
        [Test]
        public void TestPhraseWithSlop()
        {
            PhraseQuery expectedWithSlop = new PhraseQuery();
            expectedWithSlop.Add(new Term("field", "foo"));
            expectedWithSlop.Add(new Term("field", "bar"));
            expectedWithSlop.Slop = (2);

            assertEquals(expectedWithSlop, Parse("\"foo bar\"~2"));

            // multi-digit slop values must be consumed in full
            PhraseQuery expectedWithMultiDigitSlop = new PhraseQuery();
            expectedWithMultiDigitSlop.Add(new Term("field", "foo"));
            expectedWithMultiDigitSlop.Add(new Term("field", "bar"));
            expectedWithMultiDigitSlop.Slop = (10);

            assertEquals(expectedWithMultiDigitSlop, Parse("\"foo bar\"~10"));

            // malformed slop strings fall back to no slop rather than failing
            PhraseQuery expectedNoSlop = new PhraseQuery();
            expectedNoSlop.Add(new Term("field", "foo"));
            expectedNoSlop.Add(new Term("field", "bar"));

            assertEquals("Ignore trailing tilde with no slop", expectedNoSlop, Parse("\"foo bar\"~"));
            assertEquals("Ignore non-numeric trailing slop", expectedNoSlop, Parse("\"foo bar\"~a"));
            assertEquals("Ignore non-numeric trailing slop", expectedNoSlop, Parse("\"foo bar\"~1a"));
            assertEquals("Ignore negative trailing slop", expectedNoSlop, Parse("\"foo bar\"~-1"));

            // a slopped phrase followed by a term combines under the default (MUST) operator
            PhraseQuery pq = new PhraseQuery();
            pq.Add(new Term("field", "foo"));
            pq.Add(new Term("field", "bar"));
            pq.Slop = (12);

            BooleanQuery expectedBoolean = new BooleanQuery();
            expectedBoolean.Add(pq, BooleanClause.Occur.MUST);
            expectedBoolean.Add(new TermQuery(new Term("field", "baz")), BooleanClause.Occur.MUST);

            assertEquals(expectedBoolean, Parse("\"foo bar\"~12 baz"));
        }
+
+        /** test a simple prefix */
+        [Test]
+        public void TestPrefix()
+        {
+            PrefixQuery expected = new PrefixQuery(new Term("field", "foobar"));
+
+            assertEquals(expected, Parse("foobar*"));
+        }
+
+        /** test some AND'd terms using '+' operator */
+        [Test]
+        public void TestAND()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("foo+bar"));
+        }
+
+        /** test some AND'd phrases using '+' operator */
+        [Test]
+        public void TestANDPhrase()
+        {
+            PhraseQuery phrase1 = new PhraseQuery();
+            phrase1.Add(new Term("field", "foo"));
+            phrase1.Add(new Term("field", "bar"));
+            PhraseQuery phrase2 = new PhraseQuery();
+            phrase2.Add(new Term("field", "star"));
+            phrase2.Add(new Term("field", "wars"));
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(phrase1, BooleanClause.Occur.MUST);
+            expected.Add(phrase2, BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("\"foo bar\"+\"star wars\""));
+        }
+
+        /** test some AND'd terms (just using whitespace) */
+        [Test]
+        public void TestANDImplicit()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("foo bar"));
+        }
+
+        /** test some OR'd terms */
+        [Test]
+        public void TestOR()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("foo|bar"));
+            assertEquals(expected, Parse("foo||bar"));
+        }
+
        /// <summary>test some OR'd terms (just using whitespace)</summary>
        [Test]
        public void TestORImplicit()
        {
            BooleanQuery expected = new BooleanQuery();
            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
            expected.Add(new TermQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);

            // unlike the Parse() helper (which forces MUST), this parser leaves
            // DefaultOperator unset, so whitespace joins terms as SHOULD clauses
            SimpleQueryParser parser = new SimpleQueryParser(new MockAnalyzer(Random()), "field");
            assertEquals(expected, parser.Parse("foo bar"));
        }
+
+        /** test some OR'd phrases using '|' operator */
+        [Test]
+        public void TestORPhrase()
+        {
+            PhraseQuery phrase1 = new PhraseQuery();
+            phrase1.Add(new Term("field", "foo"));
+            phrase1.Add(new Term("field", "bar"));
+            PhraseQuery phrase2 = new PhraseQuery();
+            phrase2.Add(new Term("field", "star"));
+            phrase2.Add(new Term("field", "wars"));
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(phrase1, BooleanClause.Occur.SHOULD);
+            expected.Add(phrase2, BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("\"foo bar\"|\"star wars\""));
+        }
+
+        /** test negated term */
+        [Test]
+        public void TestNOT()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST_NOT);
+            expected.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("-foo"));
+            assertEquals(expected, Parse("-(foo)"));
+            assertEquals(expected, Parse("---foo"));
+        }
+
+        /** test crazy prefixes with multiple asterisks */
+        [Test]
+        public void TestCrazyPrefixes1()
+        {
+            Query expected = new PrefixQuery(new Term("field", "st*ar"));
+
+            assertEquals(expected, Parse("st*ar*"));
+        }
+
        /// <summary>test prefixes with some escaping</summary>
        [Test]
        public void TestCrazyPrefixes2()
        {
            // the C# literal "st*ar\\\\**" reaches the parser as st*ar\\**; the
            // doubled backslash escapes to a literal '\', so only the final '*'
            // acts as the prefix operator and the rest stays in the term text
            Query expected = new PrefixQuery(new Term("field", "st*ar\\*"));

            assertEquals(expected, Parse("st*ar\\\\**"));
        }
+
        /// <summary>not a prefix query! the prefix operator is escaped</summary>
        [Test]
        public void TestTermInDisguise()
        {
            // the trailing "\\\*" in the input escapes the '*', so this stays a plain
            // TermQuery; the mixed-case input is lowercased by the analyzer
            Query expected = new TermQuery(new Term("field", "st*ar\\*"));

            assertEquals(expected, Parse("sT*Ar\\\\\\*"));
        }
+
+        // a number of test cases here have garbage/errors in
+        // the syntax passed in to test that the query can
+        // still be interpreted as a guess to what the human
+        // input was trying to be
+
+        [Test]
+        public void TestGarbageTerm()
+        {
+            Query expected = new TermQuery(new Term("field", "star"));
+
+            assertEquals(expected, Parse("star"));
+            assertEquals(expected, Parse("star\n"));
+            assertEquals(expected, Parse("star\r"));
+            assertEquals(expected, Parse("star\t"));
+            assertEquals(expected, Parse("star("));
+            assertEquals(expected, Parse("star)"));
+            assertEquals(expected, Parse("star\""));
+            assertEquals(expected, Parse("\t \r\n\nstar   \n \r \t "));
+            assertEquals(expected, Parse("- + \"\" - star \\"));
+        }
+
        /// <summary>
        /// input that is empty or contains only operators/whitespace/escapes
        /// should parse to null rather than an empty query
        /// </summary>
        [Test]
        public void TestGarbageEmpty()
        {
            assertNull(Parse(""));
            assertNull(Parse("  "));
            // NOTE(review): byte-identical to the previous assertion; the upstream
            // test may have intended different whitespace here - verify against Java
            assertNull(Parse("  "));
            assertNull(Parse("\\ "));
            assertNull(Parse("\\ \\ "));
            assertNull(Parse("\"\""));
            assertNull(Parse("\" \""));
            assertNull(Parse("\" \"|\" \""));
            assertNull(Parse("(\" \"|\" \")"));
            assertNull(Parse("\" \" \" \""));
            assertNull(Parse("(\" \" \" \")"));
        }
+
+        [Test]
+        public void TestGarbageAND()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("star wars"));
+            assertEquals(expected, Parse("star+wars"));
+            assertEquals(expected, Parse("     star     wars   "));
+            assertEquals(expected, Parse("     star +    wars   "));
+            assertEquals(expected, Parse("  |     star + + |   wars   "));
+            assertEquals(expected, Parse("  |     star + + |   wars   \\"));
+        }
+
+        [Test]
+        public void TestGarbageOR()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("star|wars"));
+            assertEquals(expected, Parse("     star |    wars   "));
+            assertEquals(expected, Parse("  |     star | + |   wars   "));
+            assertEquals(expected, Parse("  +     star | + +   wars   \\"));
+        }
+
+        // A pure negation is rewritten as MUST_NOT term plus a SHOULD
+        // MatchAllDocsQuery (so the query can still match documents);
+        // repeated '-' signs collapse to a single negation.
+        [Test]
+        public void TestGarbageNOT()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST_NOT);
+            expected.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("-star"));
+            assertEquals(expected, Parse("---star"));
+            assertEquals(expected, Parse("- -star -"));
+        }
+
+        // Phrase parsing must survive trailing escapes, an empty-phrase
+        // alternative, and surrounding noise.
+        [Test]
+        public void TestGarbagePhrase()
+        {
+            PhraseQuery expected = new PhraseQuery();
+            expected.Add(new Term("field", "star"));
+            expected.Add(new Term("field", "wars"));
+
+            assertEquals(expected, Parse("\"star wars\""));
+            assertEquals(expected, Parse("\"star wars\\ \""));
+            assertEquals(expected, Parse("\"\" | \"star wars\""));
+            assertEquals(expected, Parse("          \"star wars\"        \"\"\\"));
+        }
+
+        // Unbalanced or empty parentheses must not break parsing of the
+        // single enclosed term.
+        [Test]
+        public void TestGarbageSubquery()
+        {
+            Query expected = new TermQuery(new Term("field", "star"));
+
+            assertEquals(expected, Parse("(star)"));
+            assertEquals(expected, Parse("(star))"));
+            assertEquals(expected, Parse("((star)"));
+            assertEquals(expected, Parse("     -()(star)        \n\n\r     "));
+            assertEquals(expected, Parse("| + - ( + - |      star    \n      ) \n"));
+        }
+
+        // Three whitespace/'+'-separated terms form a three-clause conjunction,
+        // even with leading garbage operators.
+        [Test]
+        public void TestCompoundAnd()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("star wars empire"));
+            assertEquals(expected, Parse("star+wars + empire"));
+            assertEquals(expected, Parse(" | --star wars empire \n\\"));
+        }
+
+        // Three '|'-separated terms form a three-clause disjunction, even with
+        // leading garbage operators.
+        [Test]
+        public void TestCompoundOr()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("star|wars|empire"));
+            assertEquals(expected, Parse("star|wars | empire"));
+            assertEquals(expected, Parse(" | --star|wars|empire \n\\"));
+        }
+
+        // Precedence: "star|wars empire" groups the OR first, then ANDs the
+        // grouped clause with "empire" — (star OR wars) AND empire.
+        [Test]
+        public void TestComplex00()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery inner = new BooleanQuery();
+            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("star|wars empire"));
+            assertEquals(expected, Parse("star|wars + empire"));
+            assertEquals(expected, Parse("star| + wars + ----empire |"));
+        }
+
+        // Precedence: "star wars | empire" groups the AND first —
+        // (star AND wars) OR empire.
+        [Test]
+        public void TestComplex01()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery inner = new BooleanQuery();
+            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
+            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
+            expected.Add(inner, BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("star wars | empire"));
+            assertEquals(expected, Parse("star + wars|empire"));
+            assertEquals(expected, Parse("star + | wars | ----empire +"));
+        }
+
+        // (star AND wars) OR empire OR strikes — the AND group plus two more
+        // SHOULD clauses at the top level.
+        [Test]
+        public void TestComplex02()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery inner = new BooleanQuery();
+            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
+            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
+            expected.Add(inner, BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("star wars | empire | strikes"));
+            assertEquals(expected, Parse("star + wars|empire | strikes"));
+            assertEquals(expected, Parse("star + | wars | ----empire | + --strikes \\"));
+        }
+
+        // ((star AND wars) OR empire OR strikes) AND back — a trailing AND term
+        // binds the whole preceding OR group.
+        [Test]
+        public void TestComplex03()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery inner = new BooleanQuery();
+            BooleanQuery inner2 = new BooleanQuery();
+            inner2.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
+            inner2.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
+            inner.Add(inner2, BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("star wars | empire | strikes back"));
+            assertEquals(expected, Parse("star + wars|empire | strikes + back"));
+            assertEquals(expected, Parse("star + | wars | ----empire | + --strikes + | --back \\"));
+        }
+
+        // Explicit parentheses: (star AND wars) OR empire OR (strikes AND back).
+        [Test]
+        public void TestComplex04()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery inner = new BooleanQuery();
+            BooleanQuery inner2 = new BooleanQuery();
+            inner.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
+            inner.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
+            inner2.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.MUST);
+            inner2.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
+            expected.Add(inner, BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner2, BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("(star wars) | empire | (strikes back)"));
+            assertEquals(expected, Parse("(star + wars) |empire | (strikes + back)"));
+            assertEquals(expected, Parse("(star + | wars |) | ----empire | + --(strikes + | --back) \\"));
+        }
+
+        // Nested groups with a negation: (star AND wars) OR
+        // (empire OR (strikes AND back AND NOT jarjar)); the inner negation is
+        // again paired with a SHOULD MatchAllDocsQuery (see TestGarbageNOT).
+        [Test]
+        public void TestComplex05()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery inner1 = new BooleanQuery();
+            BooleanQuery inner2 = new BooleanQuery();
+            BooleanQuery inner3 = new BooleanQuery();
+            BooleanQuery inner4 = new BooleanQuery();
+
+            expected.Add(inner1, BooleanClause.Occur.SHOULD);
+            expected.Add(inner2, BooleanClause.Occur.SHOULD);
+
+            inner1.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
+            inner1.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.MUST);
+
+            inner2.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
+            inner2.Add(inner3, BooleanClause.Occur.SHOULD);
+
+            inner3.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.MUST);
+            inner3.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
+            inner3.Add(inner4, BooleanClause.Occur.MUST);
+
+            inner4.Add(new TermQuery(new Term("field", "jarjar")), BooleanClause.Occur.MUST_NOT);
+            inner4.Add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
+
+            assertEquals(expected, Parse("(star wars) | (empire | (strikes back -jarjar))"));
+            assertEquals(expected, Parse("(star + wars) |(empire | (strikes + back -jarjar) () )"));
+            assertEquals(expected, Parse("(star + | wars |) | --(--empire | + --(strikes + | --back + -jarjar) \"\" ) \""));
+        }
+
+        // Nested groups plus an escaped literal term: the escaped "jar\+\|jar"
+        // must survive as the single term "jar+|jar" rather than being split
+        // on the operator characters.
+        [Test]
+        public void TestComplex06()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery inner1 = new BooleanQuery();
+            BooleanQuery inner2 = new BooleanQuery();
+            BooleanQuery inner3 = new BooleanQuery();
+
+            expected.Add(new TermQuery(new Term("field", "star")), BooleanClause.Occur.MUST);
+            expected.Add(inner1, BooleanClause.Occur.MUST);
+
+            inner1.Add(new TermQuery(new Term("field", "wars")), BooleanClause.Occur.SHOULD);
+            inner1.Add(inner2, BooleanClause.Occur.SHOULD);
+
+            inner2.Add(inner3, BooleanClause.Occur.MUST);
+            inner3.Add(new TermQuery(new Term("field", "empire")), BooleanClause.Occur.SHOULD);
+            inner3.Add(new TermQuery(new Term("field", "strikes")), BooleanClause.Occur.SHOULD);
+            inner2.Add(new TermQuery(new Term("field", "back")), BooleanClause.Occur.MUST);
+            inner2.Add(new TermQuery(new Term("field", "jar+|jar")), BooleanClause.Occur.MUST);
+
+            assertEquals(expected, Parse("star (wars | (empire | strikes back jar\\+\\|jar))"));
+            assertEquals(expected, Parse("star + (wars |(empire | strikes + back jar\\+\\|jar) () )"));
+            assertEquals(expected, Parse("star + (| wars | | --(--empire | + --strikes + | --back + jar\\+\\|jar) \"\" ) \""));
+        }
+
+        /** 
+         * Test a term with per-field weights: each weighted field becomes a
+         * boosted SHOULD TermQuery inside a coord-disabled BooleanQuery
+         * (the 'true' ctor argument disables coord).
+         */
+        [Test]
+        public void TestWeightedTerm()
+        {
+            IDictionary<string, float> weights = new Dictionary<string, float>();
+            weights["field0"] = 5f;
+            weights["field1"] = 10f;
+
+            BooleanQuery expected = new BooleanQuery(true);
+            Query field0 = new TermQuery(new Term("field0", "foo"));
+            field0.Boost = (5f);
+            expected.Add(field0, BooleanClause.Occur.SHOULD);
+            Query field1 = new TermQuery(new Term("field1", "foo"));
+            field1.Boost = (10f);
+            expected.Add(field1, BooleanClause.Occur.SHOULD);
+
+            Analyzer analyzer = new MockAnalyzer(Random());
+            SimpleQueryParser parser = new SimpleQueryParser(analyzer, weights);
+            assertEquals(expected, parser.Parse("foo"));
+        }
+
+        /** 
+         * Test a more complex query with field weights: each OR operand expands
+         * into its own coord-disabled, per-field boosted group.
+         * NOTE(review): method name is camelCase ("testWeightedOR") unlike the
+         * PascalCase siblings — consider renaming for consistency.
+         */
+        [Test]
+        public void testWeightedOR()
+        {
+            IDictionary<string, float> weights = new Dictionary<string, float>();
+            weights["field0"] = 5f;
+            weights["field1"] = 10f;
+
+            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery foo = new BooleanQuery(true);
+            Query field0 = new TermQuery(new Term("field0", "foo"));
+            field0.Boost = (5f);
+            foo.Add(field0, BooleanClause.Occur.SHOULD);
+            Query field1 = new TermQuery(new Term("field1", "foo"));
+            field1.Boost = (10f);
+            foo.Add(field1, BooleanClause.Occur.SHOULD);
+            expected.Add(foo, BooleanClause.Occur.SHOULD);
+
+            BooleanQuery bar = new BooleanQuery(true);
+            field0 = new TermQuery(new Term("field0", "bar"));
+            field0.Boost = (5f);
+            bar.Add(field0, BooleanClause.Occur.SHOULD);
+            field1 = new TermQuery(new Term("field1", "bar"));
+            field1.Boost = (10f);
+            bar.Add(field1, BooleanClause.Occur.SHOULD);
+            expected.Add(bar, BooleanClause.Occur.SHOULD);
+
+            Analyzer analyzer = new MockAnalyzer(Random());
+            SimpleQueryParser parser = new SimpleQueryParser(analyzer, weights);
+            assertEquals(expected, parser.Parse("foo|bar"));
+        }
+
+        /** 
+         * Helper to parse a query with a keyword (non-tokenizing) analyzer across
+         * "field", using the given operator-flags value so individual tests can
+         * control which parser operators are in effect.
+         */
+        private Query ParseKeyword(string text, int flags)
+        {
+            Analyzer analyzer = new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false);
+            SimpleQueryParser parser = new SimpleQueryParser(analyzer,
+                new HashMap<string, float>() { { "field", 1f } },
+                flags);
+            return parser.Parse(text);
+        }
+
+        /** 
+         * Test the ability to enable/disable the phrase operator: with it off,
+         * the quoted text is kept as a literal term.
+         * NOTE(review): upstream Java passes the bitwise complement
+         * (~PHRASE_OPERATOR) to disable an operator; this port passes the flag
+         * itself — verify the port's flag semantics match.
+         */
+        [Test]
+        public void TestDisablePhrase()
+        {
+            Query expected = new TermQuery(new Term("field", "\"test\""));
+            assertEquals(expected, ParseKeyword("\"test\"", SimpleQueryParser.PHRASE_OPERATOR));
+        }
+
+        /** 
+         * Test the ability to enable/disable the prefix operator: with it off,
+         * a trailing '*' stays part of the term.
+         * NOTE(review): see TestDisablePhrase about ~flag vs flag semantics.
+         */
+        [Test]
+        public void TestDisablePrefix()
+        {
+            Query expected = new TermQuery(new Term("field", "test*"));
+            assertEquals(expected, ParseKeyword("test*", SimpleQueryParser.PREFIX_OPERATOR));
+        }
+
+        /** 
+         * Test the ability to enable/disable the AND ('+') operator: with it off,
+         * '+' characters remain literal term content.
+         */
+        [Test]
+        public void TestDisableAND()
+        {
+            Query expected = new TermQuery(new Term("field", "foo+bar"));
+            assertEquals(expected, ParseKeyword("foo+bar", SimpleQueryParser.AND_OPERATOR));
+            expected = new TermQuery(new Term("field", "+foo+bar"));
+            assertEquals(expected, ParseKeyword("+foo+bar", SimpleQueryParser.AND_OPERATOR));
+        }
+
+        /** 
+         * Test the ability to enable/disable the OR ('|') operator: with it off,
+         * '|' characters remain literal term content.
+         */
+        [Test]
+        public void TestDisableOR()
+        {
+            Query expected = new TermQuery(new Term("field", "foo|bar"));
+            assertEquals(expected, ParseKeyword("foo|bar", SimpleQueryParser.OR_OPERATOR));
+            expected = new TermQuery(new Term("field", "|foo|bar"));
+            assertEquals(expected, ParseKeyword("|foo|bar", SimpleQueryParser.OR_OPERATOR));
+        }
+
+        /** 
+         * Test the ability to enable/disable the NOT ('-') operator: with it off,
+         * a leading '-' stays part of the term.
+         */
+        [Test]
+        public void TestDisableNOT()
+        {
+            Query expected = new TermQuery(new Term("field", "-foo"));
+            assertEquals(expected, ParseKeyword("-foo", SimpleQueryParser.NOT_OPERATOR));
+        }
+
+        /** 
+         * Test the ability to enable/disable precedence (parenthesis) operators:
+         * with them off, '(' and ')' remain literal term content.
+         */
+        [Test]
+        public void TestDisablePrecedence()
+        {
+            Query expected = new TermQuery(new Term("field", "(foo)"));
+            assertEquals(expected, ParseKeyword("(foo)", SimpleQueryParser.PRECEDENCE_OPERATORS));
+            expected = new TermQuery(new Term("field", ")foo("));
+            assertEquals(expected, ParseKeyword(")foo(", SimpleQueryParser.PRECEDENCE_OPERATORS));
+        }
+
+        /** 
+         * Test the ability to enable/disable the escape operator: with it off,
+         * a backslash stays in the term, whether bare, parenthesized, or quoted.
+         */
+        [Test]
+        public void TestDisableEscape()
+        {
+            Query expected = new TermQuery(new Term("field", "foo\\bar"));
+            assertEquals(expected, ParseKeyword("foo\\bar", SimpleQueryParser.ESCAPE_OPERATOR));
+            assertEquals(expected, ParseKeyword("(foo\\bar)", SimpleQueryParser.ESCAPE_OPERATOR));
+            assertEquals(expected, ParseKeyword("\"foo\\bar\"", SimpleQueryParser.ESCAPE_OPERATOR));
+        }
+
+        // With the whitespace operator disabled, spaces, newlines and tabs are
+        // preserved inside a single literal term instead of splitting it.
+        [Test]
+        public void TestDisableWhitespace()
+        {
+            Query expected = new TermQuery(new Term("field", "foo foo"));
+            assertEquals(expected, ParseKeyword("foo foo", SimpleQueryParser.WHITESPACE_OPERATOR));
+            expected = new TermQuery(new Term("field", " foo foo\n "));
+            assertEquals(expected, ParseKeyword(" foo foo\n ", SimpleQueryParser.WHITESPACE_OPERATOR));
+            expected = new TermQuery(new Term("field", "\t\tfoo foo foo"));
+            assertEquals(expected, ParseKeyword("\t\tfoo foo foo", SimpleQueryParser.WHITESPACE_OPERATOR));
+        }
+
+        // With the fuzzy operator disabled, "foo~1" is kept as a literal term.
+        [Test]
+        public void TestDisableFuzziness()
+        {
+            Query expected = new TermQuery(new Term("field", "foo~1"));
+            assertEquals(expected, ParseKeyword("foo~1", SimpleQueryParser.FUZZY_OPERATOR));
+        }
+
+        // With the NEAR (slop) operator disabled, the trailing "~2" is parsed as
+        // a separate literal term ANDed with the phrase.
+        // NOTE(review): see TestDisablePhrase about ~flag vs flag semantics.
+        [Test]
+        public void TestDisableSlop()
+        {
+            PhraseQuery expectedPhrase = new PhraseQuery();
+            expectedPhrase.Add(new Term("field", "foo"));
+            expectedPhrase.Add(new Term("field", "bar"));
+
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(expectedPhrase, BooleanClause.Occur.MUST);
+            expected.Add(new TermQuery(new Term("field", "~2")), BooleanClause.Occur.MUST);
+            assertEquals(expected, Parse("\"foo bar\"~2", SimpleQueryParser.NEAR_OPERATOR));
+        }
+
+        // We aren't supposed to barf on any input: fuzz the parser with random
+        // unicode strings and random flag combinations; the only assertion is
+        // "no exception".
+        [Test]
+        public void TestRandomQueries()
+        {
+            for (int i = 0; i < 1000; i++)
+            {
+                string query = TestUtil.RandomUnicodeString(Random());
+                Parse(query); // no exception
+                ParseKeyword(query, TestUtil.NextInt(Random(), 0, 1024)); // no exception
+            }
+        }
+
+        // Fuzz the parser with random strings built only from operator-heavy
+        // characters, which stresses the garbage-tolerant parsing paths.
+        // NOTE(review): camelCase name is inconsistent with PascalCase siblings.
+        [Test]
+        public void testRandomQueries2()
+        {
+            char[] chars = new char[] { 'a', '1', '|', '&', ' ', '(', ')', '"', '-', '~' };
+            StringBuilder sb = new StringBuilder();
+            for (int i = 0; i < 1000; i++)
+            {
+                sb.Length = (0);
+                int queryLength = Random().Next(20);
+                for (int j = 0; j < queryLength; j++)
+                {
+                    sb.append(chars[Random().Next(chars.Length)]);
+                }
+                Parse(sb.toString()); // no exception
+                ParseKeyword(sb.toString(), TestUtil.NextInt(Random(), 0, 1024)); // no exception
+            }
+        }
+    }
+}


[21/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser and Lucene.Net.Tests.QueryParser projects into src\ directory.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
new file mode 100644
index 0000000..f233c02
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiFieldQueryParser.cs
@@ -0,0 +1,376 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestMultiFieldQueryParser : LuceneTestCase
+    {
+        /// <summary>
+        /// Test stop words parsing for both the non-static form and for the
+        /// corresponding static form (qtxt, fields[]): stop words (and groups
+        /// that contain only stop words) must be dropped entirely.
+        /// </summary>
+        [Test]
+        public void TestStopwordsParsing()
+        {
+            AssertStopQueryEquals("one", "b:one t:one");
+            AssertStopQueryEquals("one stop", "b:one t:one");
+            AssertStopQueryEquals("one (stop)", "b:one t:one");
+            AssertStopQueryEquals("one ((stop))", "b:one t:one");
+            AssertStopQueryEquals("stop", "");
+            AssertStopQueryEquals("(stop)", "");
+            AssertStopQueryEquals("((stop))", "");
+        }
+
+        /// <summary>
+        /// Verify parsing of a query using a stopping analyzer, checking both the
+        /// instance Parse and the static MultiFieldQueryParser.Parse overload
+        /// against the same expected string form.
+        /// </summary>
+        /// <param name="qtxt">query text to parse across fields "b" and "t"</param>
+        /// <param name="expectedRes">expected ToString() of the resulting query</param>
+        private void AssertStopQueryEquals(string qtxt, string expectedRes)
+        {
+            string[] fields = { "b", "t" };
+            BooleanClause.Occur[] occur = new BooleanClause.Occur[] { BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD };
+            TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer();
+            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, a);
+
+            Query q = mfqp.Parse(qtxt);
+            assertEquals(expectedRes, q.toString());
+
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, qtxt, fields, occur, a);
+            assertEquals(expectedRes, q.toString());
+        }
+
+        // Broad coverage of MultiFieldQueryParser expansion: each unqualified
+        // clause (terms, boosts, fuzzy, prefix, range, wildcard, phrases, slop)
+        // is duplicated across fields "b" and "t"; explicitly qualified clauses
+        // are left untouched; AND mode makes top-level clauses required.
+        [Test]
+        public void TestSimple()
+        {
+            string[] fields = { "b", "t" };
+            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()));
+
+            Query q = mfqp.Parse("one");
+            assertEquals("b:one t:one", q.toString());
+
+            q = mfqp.Parse("one two");
+            assertEquals("(b:one t:one) (b:two t:two)", q.toString());
+
+            q = mfqp.Parse("+one +two");
+            assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
+
+            q = mfqp.Parse("+one -two -three");
+            assertEquals("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.toString());
+
+            q = mfqp.Parse("one^2 two");
+            assertEquals("((b:one t:one)^2.0) (b:two t:two)", q.toString());
+
+            q = mfqp.Parse("one~ two");
+            assertEquals("(b:one~2 t:one~2) (b:two t:two)", q.toString());
+
+            q = mfqp.Parse("one~0.8 two^2");
+            assertEquals("(b:one~0 t:one~0) ((b:two t:two)^2.0)", q.toString());
+
+            q = mfqp.Parse("one* two*");
+            assertEquals("(b:one* t:one*) (b:two* t:two*)", q.toString());
+
+            q = mfqp.Parse("[a TO c] two");
+            assertEquals("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.toString());
+
+            q = mfqp.Parse("w?ldcard");
+            assertEquals("b:w?ldcard t:w?ldcard", q.toString());
+
+            q = mfqp.Parse("\"foo bar\"");
+            assertEquals("b:\"foo bar\" t:\"foo bar\"", q.toString());
+
+            q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
+            assertEquals("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.toString());
+
+            q = mfqp.Parse("\"foo bar\"~4");
+            assertEquals("b:\"foo bar\"~4 t:\"foo bar\"~4", q.toString());
+
+            // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
+            q = mfqp.Parse("b:\"foo bar\"~4");
+            assertEquals("b:\"foo bar\"~4", q.toString());
+
+            // make sure that terms which have a field are not touched:
+            q = mfqp.Parse("one f:two");
+            assertEquals("(b:one t:one) f:two", q.toString());
+
+            // AND mode:
+            mfqp.DefaultOperator = QueryParserBase.AND_OPERATOR;
+            q = mfqp.Parse("one two");
+            assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
+            q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
+            assertEquals("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.toString());
+        }
+
+        // Per-field boosts passed to the parser are applied to every expanded
+        // field clause; query-level '^' boosts wrap the whole expanded group.
+        [Test]
+        public void TestBoostsSimple()
+        {
+            IDictionary<string, float> boosts = new Dictionary<string, float>();
+            boosts["b"] = (float)5;
+            boosts["t"] = (float)10;
+            string[] fields = { "b", "t" };
+            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()), boosts);
+
+
+            //Check for simple
+            Query q = mfqp.Parse("one");
+            assertEquals("b:one^5.0 t:one^10.0", q.toString());
+
+            //Check for AND
+            q = mfqp.Parse("one AND two");
+            assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q.toString());
+
+            //Check for OR
+            q = mfqp.Parse("one OR two");
+            assertEquals("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.toString());
+
+            //Check for AND and a field
+            q = mfqp.Parse("one AND two AND foo:test");
+            assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q.toString());
+
+            q = mfqp.Parse("one^3 AND two^4");
+            assertEquals("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", q.toString());
+        }
+
+        // Static Parse(queries[], fields[]): pairs each query string with its
+        // field; mismatched array lengths must throw ArgumentException; stop
+        // words are removed per-field as in the instance form.
+        [Test]
+        public void TestStaticMethod1()
+        {
+            string[] fields = { "b", "t" };
+            string[] queries = { "one", "two" };
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, new MockAnalyzer(Random()));
+            assertEquals("b:one t:two", q.toString());
+
+            string[] queries2 = { "+one", "+two" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries2, fields, new MockAnalyzer(Random()));
+            assertEquals("(+b:one) (+t:two)", q.toString());
+
+            string[] queries3 = { "one", "+two" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries3, fields, new MockAnalyzer(Random()));
+            assertEquals("b:one (+t:two)", q.toString());
+
+            string[] queries4 = { "one +more", "+two" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries4, fields, new MockAnalyzer(Random()));
+            assertEquals("(b:one +b:more) (+t:two)", q.toString());
+
+            string[] queries5 = { "blah" };
+            try
+            {
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries5, fields, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e) // NOTE(review): 'e' is unused; 'catch (ArgumentException)' would avoid a compiler warning
+            {
+                // expected exception, array length differs
+            }
+
+            // check also with stop words for this static form (qtxts[], fields[]).
+            TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer();
+
+            string[] queries6 = { "((+stop))", "+((stop))" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries6, fields, stopA);
+            assertEquals("", q.toString());
+
+            string[] queries7 = { "one ((+stop)) +more", "+((stop)) +two" };
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries7, fields, stopA);
+            assertEquals("(b:one +b:more) (+t:two)", q.toString());
+        }
+
+        // Static Parse(qtxt, fields[], occur[]): one query string is expanded
+        // across fields with a per-field Occur; mismatched occur[] length throws.
+        [Test]
+        public void TestStaticMethod2()
+        {
+            string[] fields = { "b", "t" };
+            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+b:one -t:one", q.toString());
+
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
+
+            try
+            {
+                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e) // NOTE(review): 'e' is unused
+            {
+                // expected exception, array length differs
+            }
+        }
+
+        // Legacy variant of TestStaticMethod2, retained from the old int-flags
+        // API (see the commented-out line); behavior must match the Occur form.
+        [Test]
+        public void TestStaticMethod2Old()
+        {
+            string[] fields = { "b", "t" };
+            //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
+            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
+
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer(Random()));//, fields, flags, new MockAnalyzer(random));
+            assertEquals("+b:one -t:one", q.toString());
+
+            q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
+
+            try
+            {
+                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e) // NOTE(review): 'e' is unused
+            {
+                // expected exception, array length differs
+            }
+        }
+
+        // Static Parse(queries[], fields[], flags[]) with three parallel arrays;
+        // a shorter flags[] must throw ArgumentException.
+        [Test]
+        public void TestStaticMethod3()
+        {
+            string[] queries = { "one", "two", "three" };
+            string[] fields = { "f1", "f2", "f3" };
+            BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST,
+                BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD};
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+f1:one -f2:two f3:three", q.toString());
+
+            try
+            {
+                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e) // NOTE(review): 'e' is unused
+            {
+                // expected exception, array length differs
+            }
+        }
+
+        // Legacy two-element variant of TestStaticMethod3; same array-length
+        // validation applies.
+        [Test]
+        public void TestStaticMethod3Old()
+        {
+            string[] queries = { "one", "two" };
+            string[] fields = { "b", "t" };
+            BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
+            Query q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer(Random()));
+            assertEquals("+b:one -t:two", q.toString());
+
+            try
+            {
+                BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
+                q = MultiFieldQueryParser.Parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer(Random()));
+                fail();
+            }
+            catch (ArgumentException e) // NOTE(review): 'e' is unused
+            {
+                // expected exception, array length differs
+            }
+        }
+
+        // When the analyzer yields no tokens for a field (AnalyzerReturningNull,
+        // defined below), that field is simply omitted from the expansion of
+        // analyzed clauses, while non-analyzed clause types (wildcard, fuzzy,
+        // range) still expand across all fields.
+        [Test]
+        public void TestAnalyzerReturningNull()
+        {
+            string[] fields = new string[] { "f1", "f2", "f3" };
+            MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new AnalyzerReturningNull());
+            Query q = parser.Parse("bla AND blo");
+            assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString());
+            // the following queries are not affected as their terms are not analyzed anyway:
+            q = parser.Parse("bla*");
+            assertEquals("f1:bla* f2:bla* f3:bla*", q.toString());
+            q = parser.Parse("bla~");
+            assertEquals("f1:bla~2 f2:bla~2 f3:bla~2", q.toString());
+            q = parser.Parse("[a TO c]");
+            assertEquals("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.toString());
+        }
+
+        /// <summary>
+        /// With AND as the default operator, "the footest" must still match the
+        /// single indexed document even though stop words are involved.
+        /// </summary>
+        [Test]
+        public void TestStopWordSearching()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            using (var ramDir = NewDirectory())
+            {
+                // the writer must be disposed (committing the segment) before the
+                // reader below is opened, hence the nested using block
+                using (IndexWriter iw = new IndexWriter(ramDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)))
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("body", "blah the footest blah", Field.Store.NO));
+                    iw.AddDocument(doc);
+                }
+
+                MultiFieldQueryParser mfqp =
+                  new MultiFieldQueryParser(TEST_VERSION_CURRENT, new string[] { "body" }, analyzer);
+                mfqp.DefaultOperator = QueryParser.Operator.AND;
+                Query q = mfqp.Parse("the footest");
+                using (IndexReader ir = DirectoryReader.Open(ramDir))
+                {
+                    IndexSearcher @is = NewSearcher(ir);
+                    ScoreDoc[] hits = @is.Search(q, null, 1000).ScoreDocs;
+                    assertEquals(1, hits.Length);
+                }
+            }
+        }
+
+        /// <summary>
+        /// Analyzer whose tokenizer produces no tokens for field "f1" (simulating
+        /// an analyzer "returning null"); all other fields delegate to a
+        /// <see cref="MockAnalyzer"/>.
+        /// </summary>
+        private class AnalyzerReturningNull : Analyzer
+        {
+            MockAnalyzer stdAnalyzer = new MockAnalyzer(Random());
+
+            public AnalyzerReturningNull()
+                : base(PER_FIELD_REUSE_STRATEGY)
+            { }
+
+            // NOTE: System.IO types are fully qualified because this file's using
+            // directives do not include System.IO; the original unqualified
+            // TextReader/StringReader references would not compile.
+            public override System.IO.TextReader InitReader(string fieldName, System.IO.TextReader reader)
+            {
+                // fixed Java-ism: lowercase "f1".equals(...) is not a C# string member
+                if ("f1".Equals(fieldName, StringComparison.Ordinal))
+                {
+                    // we don't use the reader, so close it:
+                    IOUtils.CloseWhileHandlingException(reader);
+                    // return empty reader, so MockTokenizer returns no tokens:
+                    return new System.IO.StringReader("");
+                }
+                else
+                {
+                    return base.InitReader(fieldName, reader);
+                }
+            }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return stdAnalyzer.CreateComponents(fieldName, reader);
+            }
+        }
+
+        /// <summary>
+        /// A regex query with no explicit field expands across all default fields.
+        /// </summary>
+        [Test]
+        public void TestSimpleRegex()
+        {
+            string[] fields = new string[] { "a", "b" };
+            MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(Random()));
+
+            // build the expected disjunction of one RegexpQuery per field
+            BooleanQuery expected = new BooleanQuery(true);
+            foreach (string field in fields)
+            {
+                expected.Add(new RegexpQuery(new Term(field, "[a-z][123]")), BooleanClause.Occur.SHOULD);
+            }
+            assertEquals(expected, parser.Parse("/[a-z][123]/"));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
new file mode 100644
index 0000000..3aaa9b2
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestMultiPhraseQueryParsing.cs
@@ -0,0 +1,121 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Verifies that the classic QueryParser builds a MultiPhraseQuery when the
+    /// analyzer emits stacked tokens (position increment 0) inside a phrase.
+    /// </summary>
+    [TestFixture]
+    public class TestMultiPhraseQueryParsing_ : LuceneTestCase
+    {
+        // immutable (token text, absolute position) pair used to script the
+        // output of CannedTokenizer
+        private class TokenAndPos
+        {
+            public readonly string token;
+            public readonly int pos;
+            public TokenAndPos(string token, int pos)
+            {
+                this.token = token;
+                this.pos = pos;
+            }
+        }
+
+        // analyzer that ignores its input text entirely and replays a fixed,
+        // scripted token stream
+        private class CannedAnalyzer : Analyzer
+        {
+            private readonly TokenAndPos[] tokens;
+
+            public CannedAnalyzer(TokenAndPos[] tokens)
+            {
+                this.tokens = tokens;
+            }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                return new TokenStreamComponents(new CannedTokenizer(reader, tokens));
+            }
+        }
+
+        // tokenizer that emits the scripted tokens, converting each absolute
+        // position into an increment relative to the previously emitted token
+        private class CannedTokenizer : Tokenizer
+        {
+            private readonly TokenAndPos[] tokens;
+            private int upto = 0;     // index of the next scripted token to emit
+            private int lastPos = 0;  // absolute position of the previous token
+            private readonly ICharTermAttribute termAtt;
+            private readonly IPositionIncrementAttribute posIncrAtt;
+
+            public CannedTokenizer(System.IO.TextReader reader, TokenAndPos[] tokens)
+                : base(reader)
+            {
+                this.tokens = tokens;
+                this.termAtt = AddAttribute<ICharTermAttribute>();
+                this.posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public override sealed bool IncrementToken()
+            {
+                ClearAttributes();
+                if (upto < tokens.Length)
+                {
+                    TokenAndPos token = tokens[upto++];
+                    termAtt.SetEmpty();
+                    termAtt.Append(token.token);
+                    // an increment of 0 stacks this token on the previous position
+                    posIncrAtt.PositionIncrement = (token.pos - lastPos);
+                    lastPos = token.pos;
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+            public override void Reset()
+            {
+                base.Reset();
+                // rewind the script so the tokenizer can be reused
+                this.upto = 0;
+                this.lastPos = 0;
+            }
+        }
+
+        [Test]
+        public void TestMultiPhraseQueryParsing()
+        {
+            // "a"/"1" share position 0 and "b"/"1" share position 1, so the
+            // parser should produce a MultiPhraseQuery with stacked terms
+            TokenAndPos[] INCR_0_QUERY_TOKENS_AND = new TokenAndPos[]
+            {
+                new TokenAndPos("a", 0),
+                new TokenAndPos("1", 0),
+                new TokenAndPos("b", 1),
+                new TokenAndPos("1", 1),
+                new TokenAndPos("c", 2)
+            };
+
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new CannedAnalyzer(INCR_0_QUERY_TOKENS_AND));
+            Query q = qp.Parse("\"this text is acually ignored\"");
+            assertTrue("wrong query type!", q is MultiPhraseQuery);
+
+            MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery();
+            multiPhraseQuery.Add(new Term[] { new Term("field", "a"), new Term("field", "1") }, -1);
+            multiPhraseQuery.Add(new Term[] { new Term("field", "b"), new Term("field", "1") }, 0);
+            multiPhraseQuery.Add(new Term[] { new Term("field", "c") }, 1);
+
+            assertEquals(multiPhraseQuery, q);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
new file mode 100644
index 0000000..369fe92
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -0,0 +1,564 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Documents;
+using Lucene.Net.QueryParser.Flexible.Standard;
+using Lucene.Net.QueryParser.Util;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using NUnit.Framework;
+using System;
+using System.Diagnostics;
+
+namespace Lucene.Net.QueryParser.Classic
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    [TestFixture]
+    public class TestQueryParser : QueryParserTestBase
+    {
+        /// <summary>
+        /// QueryParser subclass that rejects fuzzy and wildcard queries by
+        /// throwing <see cref="ParseException"/> from the corresponding factory
+        /// methods; used by the "custom query parser" tests below.
+        /// </summary>
+        public class QPTestParser : QueryParser
+        {
+            public QPTestParser(string f, Analyzer a)
+                : base(TEST_VERSION_CURRENT, f, a)
+            {
+            }
+
+            protected internal override Query GetFuzzyQuery(string field, string termStr, float minSimilarity)
+            {
+                throw new ParseException("Fuzzy queries not allowed");
+            }
+
+            protected internal override Query GetWildcardQuery(string field, string termStr)
+            {
+                throw new ParseException("Wildcard queries not allowed");
+            }
+
+        }
+
+        // Moved to QueryParserTestBase
+        //public QueryParser GetParser(Analyzer a)
+        //{
+        //    if (a == null) a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+        //    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, DefaultField, a);
+        //    qp.DefaultOperator = (QueryParserBase.OR_OPERATOR);
+        //    return qp;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override ICommonQueryParserConfiguration GetParserConfig(Analyzer a)
+        //{
+        //    return GetParser(a);
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override Query GetQuery(string query, ICommonQueryParserConfiguration cqpC)
+        //{
+        //    Debug.Assert(cqpC != null, "Parameter must not be null");
+        //    Debug.Assert(cqpC is QueryParser, "Parameter must be instance of QueryParser");
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    return qp.Parse(query);
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override Query GetQuery(string query, Analyzer a)
+        //{
+        //    return GetParser(a).Parse(query);
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override bool IsQueryParserException(Exception exception)
+        //{
+        //    return exception is ParseException;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetDefaultOperatorOR(ICommonQueryParserConfiguration cqpC)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.DefaultOperator = QueryParserBase.Operator.OR;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetDefaultOperatorAND(ICommonQueryParserConfiguration cqpC)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.DefaultOperator = QueryParserBase.Operator.AND;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetAnalyzeRangeTerms(ICommonQueryParserConfiguration cqpC, bool value)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.AnalyzeRangeTerms = (value);
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetAutoGeneratePhraseQueries(ICommonQueryParserConfiguration cqpC, bool value)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.AutoGeneratePhraseQueries = value;
+        //}
+
+        // Moved to QueryParserTestBase
+        //public override void SetDateResolution(ICommonQueryParserConfiguration cqpC, ICharSequence field, DateTools.Resolution value)
+        //{
+        //    Debug.Assert(cqpC is QueryParser);
+        //    QueryParser qp = (QueryParser)cqpC;
+        //    qp.SetDateResolution(field.toString(), value);
+        //}
+
+        /// <summary>
+        /// OR must be the parser's default operator, and switching the default
+        /// via SetDefaultOperatorAND/OR must round-trip.
+        /// </summary>
+        [Test]
+        public override void TestDefaultOperator()
+        {
+            QueryParser qp = GetParser(new MockAnalyzer(Random()));
+            // make sure OR is the default:
+            assertEquals(QueryParserBase.OR_OPERATOR, qp.DefaultOperator);
+            SetDefaultOperatorAND(qp);
+            assertEquals(QueryParserBase.AND_OPERATOR, qp.DefaultOperator);
+            SetDefaultOperatorOR(qp);
+            assertEquals(QueryParserBase.OR_OPERATOR, qp.DefaultOperator);
+        }
+
+        // LUCENE-2002: when we run javacc to regen QueryParser,
+        // we also run a replaceregexp step to fix 2 of the public
+        // ctors (change them to protected):
+        //
+        // protected QueryParser(CharStream stream)
+        //
+        // protected QueryParser(QueryParserTokenManager tm)
+        //
+        // This test is here as a safety, in case that ant step
+        // doesn't work for some reason.
+        [Test]
+        public void TestProtectedCtors()
+        {
+            // Unlike Java's Class.getConstructor(), .NET's Type.GetConstructor()
+            // returns null (it does not throw) when no matching public
+            // constructor exists, so we assert on a null result.  The original
+            // try/fail()/catch(Exception) port was vacuous: the assertion
+            // exception thrown by fail() was itself swallowed by the catch,
+            // so the test passed regardless of constructor visibility.
+            Assert.IsNull(typeof(QueryParser).GetConstructor(new Type[] { typeof(ICharStream) }),
+                "please switch public QueryParser(CharStream) to be protected");
+            Assert.IsNull(typeof(QueryParser).GetConstructor(new Type[] { typeof(QueryParserTokenManager) }),
+                "please switch public QueryParser(QueryParserTokenManager) to be protected");
+        }
+
+        /// <summary>
+        /// Parser that interprets a trailing "~n\u20ac" fuzzy slop as a numeric
+        /// +/- n/2 range around the term value, demonstrating that
+        /// HandleBareFuzzy is extendable.
+        /// </summary>
+        private class TestFuzzySlopeExtendabilityQueryParser : QueryParser
+        {
+            public TestFuzzySlopeExtendabilityQueryParser()
+                : base(TEST_VERSION_CURRENT, "a", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
+            {}
+
+            protected internal override Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
+            {
+                if (fuzzySlop.image.EndsWith("\u20ac", StringComparison.Ordinal))
+                {
+                    float fms = FuzzyMinSim;
+                    try
+                    {
+                        // strip the leading '~' and the trailing Euro sign;
+                        // parse with the invariant culture so "12.45" works
+                        // regardless of the machine's decimal separator
+                        fms = float.Parse(fuzzySlop.image.Substring(1, fuzzySlop.image.Length - 2),
+                            System.Globalization.CultureInfo.InvariantCulture);
+                    }
+                    catch (Exception)
+                    {
+                        // best effort: fall back to FuzzyMinSim on any parse failure
+                    }
+                    float value = float.Parse(termImage, System.Globalization.CultureInfo.InvariantCulture);
+                    // format the bounds invariantly too, so the generated range
+                    // query text matches what the parser-under-test expects
+                    return GetRangeQuery(qfield,
+                        (value - fms / 2.0f).ToString(System.Globalization.CultureInfo.InvariantCulture),
+                        (value + fms / 2.0f).ToString(System.Globalization.CultureInfo.InvariantCulture),
+                        true, true);
+                }
+                return base.HandleBareFuzzy(qfield, fuzzySlop, termImage);
+            }
+        }
+
+        /// <summary>
+        /// "12.45~1\u20ac" should be rewritten by the custom parser into the
+        /// equivalent inclusive range query [11.95 TO 12.95].
+        /// </summary>
+        [Test]
+        public void TestFuzzySlopeExtendability()
+        {
+            QueryParser qp = new TestFuzzySlopeExtendabilityQueryParser();
+            assertEquals(qp.Parse("a:[11.95 TO 12.95]"), qp.Parse("12.45~1\u20ac"));
+        }
+
+        /// <summary>
+        /// Parser whose factory overrides record in type[0] which factory
+        /// handled the last query: 1 = wildcard, 2 = prefix, 3 = field query.
+        /// </summary>
+        private class TestStarParsingQueryParser : QueryParser
+        {
+            // single-element array so the flag is observable after parsing
+            public readonly int[] type = new int[1];
+
+            public TestStarParsingQueryParser()
+                : base(TEST_VERSION_CURRENT, "field", new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false))
+            { }
+
+            protected internal override Query GetWildcardQuery(string field, string termStr)
+            {
+                // override error checking of superclass
+                type[0] = 1;
+                return new TermQuery(new Index.Term(field, termStr));
+            }
+
+            protected internal override Query GetPrefixQuery(string field, string termStr)
+            {
+                // override error checking of superclass
+                type[0] = 2;
+                return new TermQuery(new Index.Term(field, termStr));
+            }
+
+            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
+            {
+                type[0] = 3;
+                return base.GetFieldQuery(field, queryText, quoted);
+            }
+        }
+
+        /// <summary>
+        /// Verifies which factory (wildcard/prefix/field) the parser dispatches
+        /// to for the various '*' forms, plus the parsed term text and boost.
+        /// </summary>
+        [Test]
+        public override void TestStarParsing()
+        {
+            TestStarParsingQueryParser qp = new TestStarParsingQueryParser();
+
+            TermQuery tq;
+
+            // trailing '*' is a prefix query (type 2)
+            tq = (TermQuery)qp.Parse("foo:zoo*");
+            assertEquals("zoo", tq.Term.Text());
+            assertEquals(2, qp.type[0]);
+
+            tq = (TermQuery)qp.Parse("foo:zoo*^2");
+            assertEquals("zoo", tq.Term.Text());
+            assertEquals(2, qp.type[0]);
+            assertEquals(tq.Boost, 2, 0);
+
+            // bare '*' term is treated as a wildcard query (type 1)
+            tq = (TermQuery)qp.Parse("foo:*");
+            assertEquals("*", tq.Term.Text());
+            assertEquals(1, qp.type[0]); // could be a valid prefix query in the future too
+
+            tq = (TermQuery)qp.Parse("foo:*^2");
+            assertEquals("*", tq.Term.Text());
+            assertEquals(1, qp.type[0]);
+            assertEquals(tq.Boost, 2, 0);
+
+            // '*' as the field name goes through the plain field-query path (type 3)
+            tq = (TermQuery)qp.Parse("*:foo");
+            assertEquals("*", tq.Term.Field);
+            assertEquals("foo", tq.Term.Text());
+            assertEquals(3, qp.type[0]);
+
+            tq = (TermQuery)qp.Parse("*:*");
+            assertEquals("*", tq.Term.Field);
+            assertEquals("*", tq.Term.Text());
+            assertEquals(1, qp.type[0]); // could be handled as a prefix query in the
+            // future
+
+            tq = (TermQuery)qp.Parse("(*:*)");
+            assertEquals("*", tq.Term.Field);
+            assertEquals("*", tq.Term.Text());
+            assertEquals(1, qp.type[0]);
+        }
+
+        /// <summary>
+        /// QPTestParser must reject wildcard syntax with a ParseException.
+        /// </summary>
+        [Test]
+        public void TestCustomQueryParserWildcard()
+        {
+            try
+            {
+                new QPTestParser("contents", new MockAnalyzer(Random(),
+                    MockTokenizer.WHITESPACE, false)).Parse("a?t");
+                fail("Wildcard queries should not be allowed");
+            }
+            catch (ParseException) // variable removed: it was unused (CS0168)
+            {
+                // expected exception
+            }
+        }
+
+        /// <summary>
+        /// QPTestParser must reject fuzzy syntax with a ParseException.
+        /// </summary>
+        [Test]
+        public void TestCustomQueryParserFuzzy()
+        {
+            try
+            {
+                new QPTestParser("contents", new MockAnalyzer(Random(),
+                    MockTokenizer.WHITESPACE, false)).Parse("xunit~");
+                fail("Fuzzy queries should not be allowed");
+            }
+            catch (ParseException) // variable removed: it was unused (CS0168)
+            {
+                // expected exception
+            }
+        }
+
+        /// <summary>
+        /// query parser that doesn't expand synonyms when users use double quotes
+        /// </summary>
+        private class SmartQueryParser : QueryParser
+        {
+            // non-expanding analyzer, used only for quoted (phrase) queries
+            Analyzer morePrecise = new Analyzer2();
+
+            public SmartQueryParser()
+                : base(TEST_VERSION_CURRENT, "field", new Analyzer1())
+            {
+            }
+
+            protected internal override Query GetFieldQuery(string field, string queryText, bool quoted)
+            {
+                // quoted text bypasses the synonym-expanding default analyzer
+                if (quoted) return NewFieldQuery(morePrecise, field, queryText, quoted);
+                else return base.GetFieldQuery(field, queryText, quoted);
+            }
+        }
+
+        /// <summary>
+        /// Synonym expansion: the ordinary parser expands synonyms even inside
+        /// quotes, while SmartQueryParser suppresses expansion for quoted
+        /// (phrase) queries.
+        /// </summary>
+        [Test] // added: sibling overrides carry [Test] explicitly; without it
+               // some runners skip this override (see commit message)
+        public override void TestNewFieldQuery()
+        {
+            /** ordinary behavior, synonyms form uncoordinated boolean query */
+            QueryParser dumb = new QueryParser(TEST_VERSION_CURRENT, "field",
+                new Analyzer1());
+            BooleanQuery expanded = new BooleanQuery(true);
+            expanded.Add(new TermQuery(new Index.Term("field", "dogs")),
+                BooleanClause.Occur.SHOULD);
+            expanded.Add(new TermQuery(new Index.Term("field", "dog")),
+                BooleanClause.Occur.SHOULD);
+            assertEquals(expanded, dumb.Parse("\"dogs\""));
+            /** even with the phrase operator the behavior is the same */
+            assertEquals(expanded, dumb.Parse("dogs"));
+
+            /**
+             * custom behavior, the synonyms are expanded, unless you use quote operator
+             */
+            QueryParser smart = new SmartQueryParser();
+            assertEquals(expanded, smart.Parse("dogs"));
+
+            Query unexpanded = new TermQuery(new Index.Term("field", "dogs"));
+            assertEquals(unexpanded, smart.Parse("\"dogs\""));
+        }
+
+        // LUCENETODO: fold these into QueryParserTestBase
+
+        /// <summary>
+        /// adds synonym of "dog" for "dogs".
+        /// </summary>
+        public class MockSynonymAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                // default MockTokenizer feeding the synonym-injecting filter
+                MockTokenizer tokenizer = new MockTokenizer(reader);
+                return new TokenStreamComponents(tokenizer, new MockSynonymFilter(tokenizer));
+            }
+        }
+
+        /// <summary>
+        /// simple synonyms test: "dogs" expands to an uncoordinated boolean of
+        /// dogs/dog under both OR and AND, with and without quotes and boosts
+        /// </summary>
+        [Test]
+        public void TestSynonyms()
+        {
+            BooleanQuery expected = new BooleanQuery(true);
+            expected.Add(new TermQuery(new Index.Term("field", "dogs")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Index.Term("field", "dog")), BooleanClause.Occur.SHOULD);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("dogs"));
+            assertEquals(expected, qp.Parse("\"dogs\""));
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("dogs"));
+            assertEquals(expected, qp.Parse("\"dogs\""));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("dogs^2"));
+            assertEquals(expected, qp.Parse("\"dogs\"^2"));
+        }
+
+        /// <summary>
+        /// forms multiphrase query: "old dogs" becomes a MultiPhraseQuery with
+        /// dogs/dog stacked at the second position
+        /// </summary>
+        [Test]
+        public void TestSynonymsPhrase()
+        {
+            MultiPhraseQuery expected = new MultiPhraseQuery();
+            expected.Add(new Index.Term("field", "old"));
+            expected.Add(new Index.Term[] { new Index.Term("field", "dogs"), new Index.Term("field", "dog") });
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("\"old dogs\""));
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\"old dogs\""));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\"old dogs\"^2"));
+            // slop and boost combined
+            expected.Slop = (3);
+            assertEquals(expected, qp.Parse("\"old dogs\"~3^2"));
+        }
+
+        /// <summary>
+        /// adds synonym of "\u570b" for "\u56fd".
+        /// </summary>
+        protected internal class MockCJKSynonymFilter : TokenFilter
+        {
+            internal ICharTermAttribute TermAtt;
+            internal IPositionIncrementAttribute PosIncAtt;
+            // set when the previous token was "\u56fd"; the synonym is injected
+            // on the NEXT IncrementToken() call, before consuming more input
+            internal bool AddSynonym = false;
+
+            public MockCJKSynonymFilter(TokenStream input)
+                : base(input)
+            {
+                TermAtt = AddAttribute<ICharTermAttribute>();
+                PosIncAtt = AddAttribute<IPositionIncrementAttribute>();
+            }
+
+            public sealed override bool IncrementToken()
+            {
+                if (AddSynonym) // inject our synonym
+                {
+                    ClearAttributes();
+                    TermAtt.SetEmpty().Append("\u570b");
+                    // position increment 0 stacks the synonym on the original token
+                    PosIncAtt.PositionIncrement = 0;
+                    AddSynonym = false;
+                    return true;
+                }
+
+                if (input.IncrementToken())
+                {
+                    AddSynonym = TermAtt.ToString().Equals("\u56fd");
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        }
+
+        // per-character CJK tokenizer chained into the synonym-injecting filter
+        protected class MockCJKSynonymAnalyzer : Analyzer
+        {
+            public override TokenStreamComponents CreateComponents(string fieldName, System.IO.TextReader reader)
+            {
+                Tokenizer tokenizer = new SimpleCJKTokenizer(reader);
+                return new TokenStreamComponents(tokenizer, new MockCJKSynonymFilter(tokenizer));
+            }
+        }
+
+        /// <summary>
+        /// simple CJK synonym test: a single term expands to an uncoordinated
+        /// boolean of the term and its synonym, regardless of default operator
+        /// </summary>
+        [Test]
+        public void TestCJKSynonym()
+        {
+            BooleanQuery expected = new BooleanQuery(true);
+            expected.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            expected.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("\u56fd"));
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u56fd^2"));
+        }
+
+        /// <summary>
+        /// synonyms with default OR operator: the synonym pair forms a nested
+        /// uncoordinated boolean inside the outer SHOULD query
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsOR()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
+            BooleanQuery inner = new BooleanQuery(true);
+            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.SHOULD);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd^2"));
+        }
+
+        /// <summary>
+        /// more complex synonyms with default OR operator: each occurrence of
+        /// the synonym-bearing term gets its own nested boolean
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsOR2()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.SHOULD);
+            BooleanQuery inner = new BooleanQuery(true);
+            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.SHOULD);
+            BooleanQuery inner2 = new BooleanQuery(true);
+            inner2.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner2.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner2, BooleanClause.Occur.SHOULD);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd^2"));
+        }
+
+        /// <summary>
+        /// synonyms with default AND operator: the synonym group becomes a
+        /// single MUST clause (the synonyms themselves remain SHOULD within it)
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsAND()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.MUST);
+            BooleanQuery inner = new BooleanQuery(true);
+            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.MUST);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd^2"));
+        }
+
+        /// <summary>
+        /// more complex synonyms with default AND operator: every occurrence of
+        /// the synonym-bearing term yields its own MUST synonym group
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsAND2()
+        {
+            BooleanQuery expected = new BooleanQuery();
+            expected.Add(new TermQuery(new Index.Term("field", "\u4e2d")), BooleanClause.Occur.MUST);
+            BooleanQuery inner = new BooleanQuery(true);
+            inner.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner, BooleanClause.Occur.MUST);
+            BooleanQuery inner2 = new BooleanQuery(true);
+            inner2.Add(new TermQuery(new Index.Term("field", "\u56fd")), BooleanClause.Occur.SHOULD);
+            inner2.Add(new TermQuery(new Index.Term("field", "\u570b")), BooleanClause.Occur.SHOULD);
+            expected.Add(inner2, BooleanClause.Occur.MUST);
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd"));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\u4e2d\u56fd\u56fd^2"));
+        }
+
+        /// <summary>
+        /// CJK synonyms inside a phrase: parses to a MultiPhraseQuery with the
+        /// synonym pair stacked at the second position; boost and slop apply
+        /// </summary>
+        [Test]
+        public void TestCJKSynonymsPhrase()
+        {
+            MultiPhraseQuery expected = new MultiPhraseQuery();
+            expected.Add(new Index.Term("field", "\u4e2d"));
+            expected.Add(new Index.Term[] { new Index.Term("field", "\u56fd"), new Index.Term("field", "\u570b") });
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockCJKSynonymAnalyzer());
+            qp.DefaultOperator = (QueryParserBase.Operator.AND);
+            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\""));
+            expected.Boost = (2.0f);
+            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\"^2"));
+            expected.Slop = (3);
+            assertEquals(expected, qp.Parse("\"\u4e2d\u56fd\"~3^2"));
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs b/src/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
new file mode 100644
index 0000000..2c2d6e2
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/ComplexPhrase/TestComplexPhraseQuery.cs
@@ -0,0 +1,214 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.ComplexPhrase
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Tests for <see cref="ComplexPhraseQueryParser"/>: wildcard, fuzzy, range and
+    /// boolean syntax embedded inside phrase queries, run against a small in-memory
+    /// index built in <see cref="SetUp"/>.
+    /// </summary>
+    [TestFixture]
+    public class TestComplexPhraseQuery : LuceneTestCase
+    {
+        Directory rd;
+        Analyzer analyzer;
+
+        // Test corpus: (name, id, role) triples indexed by SetUp().
+        DocData[] docsContent = {
+            new DocData("john smith", "1", "developer"),
+            new DocData("johathon smith", "2", "developer"),
+            new DocData("john percival smith", "3", "designer"),
+            new DocData("jackson waits tom", "4", "project manager")
+        };
+
+        private IndexSearcher searcher;
+        private IndexReader reader;
+
+        string defaultFieldName = "name";
+
+        // Proximity-ordering flag handed to the parser; individual tests flip
+        // this before calling CheckMatches/CheckBadQuery.
+        bool inOrder = true;
+
+        [Test]
+        public void TestComplexPhrases()
+        {
+            CheckMatches("\"john smith\"", "1"); // Simple multi-term still works
+            CheckMatches("\"j*   smyth~\"", "1,2"); // wildcards and fuzzies are OK in
+            // phrases
+            CheckMatches("\"(jo* -john)  smith\"", "2"); // boolean logic works
+            CheckMatches("\"jo*  smith\"~2", "1,2,3"); // position logic works.
+            CheckMatches("\"jo* [sma TO smZ]\" ", "1,2"); // range queries supported
+            CheckMatches("\"john\"", "1,3"); // Simple single-term still works
+            CheckMatches("\"(john OR johathon)  smith\"", "1,2"); // boolean logic with
+            // brackets works.
+            CheckMatches("\"(jo* -john) smyth~\"", "2"); // boolean logic with
+            // brackets works.
+
+            // CheckMatches("\"john -percival\"", "1"); // not logic doesn't work
+            // currently :(.
+
+            CheckMatches("\"john  nosuchword*\"", ""); // phrases with clauses producing
+            // empty sets
+
+            CheckBadQuery("\"jo*  id:1 smith\""); // mixing fields in a phrase is bad
+            CheckBadQuery("\"jo* \"smith\" \""); // phrases inside phrases is bad
+        }
+
+        [Test]
+        public void TestUnOrderedProximitySearches()
+        {
+            inOrder = true;
+            CheckMatches("\"smith jo*\"~2", ""); // ordered proximity produces empty set
+
+            inOrder = false;
+            CheckMatches("\"smith jo*\"~2", "1,2,3"); // un-ordered proximity
+        }
+
+        /// <summary>
+        /// Asserts that parsing <paramref name="qString"/> fails with some exception.
+        /// </summary>
+        private void CheckBadQuery(String qString)
+        {
+            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
+            qp.InOrder = inOrder;
+            Exception expected = null;
+            try
+            {
+                qp.Parse(qString);
+            }
+            catch (Exception e)
+            {
+                expected = e; // any parse-time failure satisfies this test
+            }
+            assertNotNull("Expected parse error in " + qString, expected);
+        }
+
+        /// <summary>
+        /// Parses and runs <paramref name="qString"/>, asserting that exactly the
+        /// doc ids listed in the comma-separated <paramref name="expectedVals"/> match.
+        /// </summary>
+        private void CheckMatches(string qString, string expectedVals)
+        {
+            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
+            qp.InOrder = inOrder;
+            qp.FuzzyPrefixLength = 1; // usually a good idea
+
+            Query q = qp.Parse(qString);
+
+            // RemoveEmptyEntries already drops zero-length entries, so no extra
+            // per-element length check is needed.
+            HashSet<string> expecteds = new HashSet<string>(
+                expectedVals.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries));
+
+            TopDocs td = searcher.Search(q, 10);
+            ScoreDoc[] sd = td.ScoreDocs;
+            for (int i = 0; i < sd.Length; i++)
+            {
+                Document doc = searcher.Doc(sd[i].Doc);
+                string id = doc.Get("id");
+                assertTrue(qString + "matched doc#" + id + " not expected", expecteds
+                    .Contains(id));
+                expecteds.Remove(id);
+            }
+
+            assertEquals(qString + " missing some matches ", 0, expecteds.Count);
+        }
+
+        [Test]
+        public void TestFieldedQuery()
+        {
+            CheckMatches("name:\"john smith\"", "1");
+            CheckMatches("name:\"j*   smyth~\"", "1,2");
+            CheckMatches("role:\"developer\"", "1,2");
+            CheckMatches("role:\"p* manager\"", "4");
+            CheckMatches("role:de*", "1,2,3");
+            CheckMatches("name:\"j* smyth~\"~5", "1,2,3");
+            CheckMatches("role:\"p* manager\" AND name:jack*", "4");
+            CheckMatches("+role:developer +name:jack*", "");
+            CheckMatches("name:\"john smith\"~2 AND role:designer AND id:3", "3");
+        }
+
+        [Test]
+        public void TestHashcodeEquals()
+        {
+            ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(TEST_VERSION_CURRENT, defaultFieldName, analyzer);
+            qp.InOrder = true;
+            qp.FuzzyPrefixLength = 1;
+
+            String qString = "\"aaa* bbb*\"";
+
+            Query q = qp.Parse(qString);
+            Query q2 = qp.Parse(qString);
+
+            assertEquals(q.GetHashCode(), q2.GetHashCode());
+            assertEquals(q, q2);
+
+            qp.InOrder = (false); // SOLR-6011
+
+            q2 = qp.Parse(qString);
+
+            // although the general contract of hashCode can't guarantee different values, if we only change one thing
+            // about a single query, it normally should result in a different value (and will with the current
+            // implementation in ComplexPhraseQuery)
+            assertTrue(q.GetHashCode() != q2.GetHashCode());
+            // C# uses Equals (the lowercase 'equals' was a Java porting artifact)
+            assertTrue(!q.Equals(q2));
+            assertTrue(!q2.Equals(q));
+        }
+
+        /// <summary>
+        /// Builds the in-memory index from <see cref="docsContent"/> and opens the searcher.
+        /// </summary>
+        public override void SetUp()
+        {
+            base.SetUp();
+
+            analyzer = new MockAnalyzer(Random());
+            rd = NewDirectory();
+            using (IndexWriter w = new IndexWriter(rd, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)))
+            {
+                for (int i = 0; i < docsContent.Length; i++)
+                {
+                    Document doc = new Document();
+                    doc.Add(NewTextField("name", docsContent[i].Name, Field.Store.YES));
+                    doc.Add(NewTextField("id", docsContent[i].Id, Field.Store.YES));
+                    doc.Add(NewTextField("role", docsContent[i].Role, Field.Store.YES));
+                    w.AddDocument(doc);
+                }
+            }
+            reader = DirectoryReader.Open(rd);
+            searcher = NewSearcher(reader);
+        }
+
+        public override void TearDown()
+        {
+            reader.Dispose();
+            rd.Dispose();
+            base.TearDown();
+        }
+
+        /// <summary>Immutable holder for one test document's field values.</summary>
+        private class DocData
+        {
+            public DocData(string name, string id, string role)
+            {
+                this.Name = name;
+                this.Id = id;
+                this.Role = role;
+            }
+
+            public string Name { get; private set; }
+            public string Id { get; private set; }
+            public string Role { get; private set; }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs b/src/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
new file mode 100644
index 0000000..cbef5d8
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Ext/ExtensionStub.cs
@@ -0,0 +1,30 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Minimal <see cref="ParserExtension"/> used by the extension tests: turns the
+    /// raw extension query string into a plain <see cref="TermQuery"/> on the
+    /// extension's target field.
+    /// </summary>
+    internal class ExtensionStub : ParserExtension
+    {
+        public override Query Parse(ExtensionQuery components)
+        {
+            return new TermQuery(new Term(components.Field, components.RawQueryString));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
new file mode 100644
index 0000000..7e2e99e
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Ext/TestExtendableQueryParser.cs
@@ -0,0 +1,145 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.QueryParser.Classic;
+using Lucene.Net.Search;
+using NUnit.Framework;
+using System.Globalization;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Testcase for the class <see cref="ExtendableQueryParser"/>
+    /// </summary>
+    [TestFixture]
+    public class TestExtendableQueryParser : TestQueryParser
+    {
+        // Delimiters to exercise: the default extension delimiter plus two custom ones.
+        private static char[] DELIMITERS = new char[] {
+            Extensions.DEFAULT_EXTENSION_FIELD_DELIMITER, '-', '|' };
+
+        public override Classic.QueryParser GetParser(Analyzer a)
+        {
+            return GetParser(a, null);
+        }
+
+        /// <summary>
+        /// Builds an <see cref="ExtendableQueryParser"/> (with the given
+        /// <paramref name="extensions"/> when non-null) using OR as the default operator.
+        /// </summary>
+        public Classic.QueryParser GetParser(Analyzer a, Extensions extensions)
+        {
+            if (a == null)
+                a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+            Classic.QueryParser qp = extensions == null ? new ExtendableQueryParser(
+                TEST_VERSION_CURRENT, DefaultField, a) : new ExtendableQueryParser(
+                TEST_VERSION_CURRENT, DefaultField, a, extensions);
+            qp.DefaultOperator = QueryParserBase.OR_OPERATOR;
+            return qp;
+        }
+
+        [Test]
+        public void TestUnescapedExtDelimiter()
+        {
+            Extensions ext = NewExtensions(':');
+            ext.Add("testExt", new ExtensionStub());
+            ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null, ext);
+            try
+            {
+                parser.Parse("aField:testExt:\"foo \\& bar\"");
+                fail("extension field delimiter is not escaped");
+            }
+            catch (ParseException) // variable removed: exception is expected and unused
+            {
+                // expected: an unescaped delimiter inside the field must not parse
+            }
+        }
+
+        [Test]
+        public void TestExtFieldUnqoted()
+        {
+            for (int i = 0; i < DELIMITERS.Length; i++)
+            {
+                Extensions ext = NewExtensions(DELIMITERS[i]);
+                ext.Add("testExt", new ExtensionStub());
+                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
+                    ext);
+                string field = ext.BuildExtensionField("testExt", "aField");
+                Query query = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:foo bar", field));
+                assertTrue("expected instance of BooleanQuery but was "
+                    + query.GetType(), query is BooleanQuery);
+                BooleanQuery bquery = (BooleanQuery)query;
+                BooleanClause[] clauses = bquery.Clauses;
+                assertEquals(2, clauses.Length);
+                // First clause: produced by the extension, bound to "aField".
+                BooleanClause booleanClause = clauses[0];
+                query = booleanClause.Query;
+                assertTrue("expected instance of TermQuery but was " + query.GetType(),
+                    query is TermQuery);
+                TermQuery tquery = (TermQuery)query;
+                assertEquals("aField", tquery.Term
+                    .Field);
+                assertEquals("foo", tquery.Term.Text());
+
+                // Second clause: plain term parsed against the default field.
+                booleanClause = clauses[1];
+                query = booleanClause.Query;
+                assertTrue("expected instance of TermQuery but was " + query.GetType(),
+                    query is TermQuery);
+                tquery = (TermQuery)query;
+                assertEquals(DefaultField, tquery.Term.Field);
+                assertEquals("bar", tquery.Term.Text());
+            }
+        }
+
+        [Test]
+        public void TestExtDefaultField()
+        {
+            for (int i = 0; i < DELIMITERS.Length; i++)
+            {
+                Extensions ext = NewExtensions(DELIMITERS[i]);
+                ext.Add("testExt", new ExtensionStub());
+                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
+                    ext);
+                string field = ext.BuildExtensionField("testExt");
+                Query parse = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:\"foo \\& bar\"", field));
+                assertTrue("expected instance of TermQuery but was " + parse.GetType(),
+                    parse is TermQuery);
+                TermQuery tquery = (TermQuery)parse;
+                assertEquals(DefaultField, tquery.Term.Field);
+                assertEquals("foo & bar", tquery.Term.Text());
+            }
+        }
+
+        public Extensions NewExtensions(char delimiter)
+        {
+            return new Extensions(delimiter);
+        }
+
+        [Test]
+        public void TestExtField()
+        {
+            for (int i = 0; i < DELIMITERS.Length; i++)
+            {
+                Extensions ext = NewExtensions(DELIMITERS[i]);
+                ext.Add("testExt", new ExtensionStub());
+                ExtendableQueryParser parser = (ExtendableQueryParser)GetParser(null,
+                    ext);
+                string field = ext.BuildExtensionField("testExt", "afield");
+                Query parse = parser.Parse(string.Format(CultureInfo.InvariantCulture, "{0}:\"foo \\& bar\"", field));
+                assertTrue("expected instance of TermQuery but was " + parse.GetType(),
+                    parse is TermQuery);
+                TermQuery tquery = (TermQuery)parse;
+                assertEquals("afield", tquery.Term.Field);
+                assertEquals("foo & bar", tquery.Term.Text());
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs b/src/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
new file mode 100644
index 0000000..4850987
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Ext/TestExtensions.cs
@@ -0,0 +1,97 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.QueryParser.Ext
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Testcase for the <see cref="Extensions"/> class
+    /// </summary>
+    [TestFixture]
+    public class TestExtensions : LuceneTestCase
+    {
+        // Fresh default-delimiter Extensions instance per test (see SetUp).
+        private Extensions ext;
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            this.ext = new Extensions();
+        }
+
+        [Test]
+        public void TestBuildExtensionField()
+        {
+            // Default delimiter ':' must come out escaped in the built field name.
+            assertEquals("field\\:key", ext.BuildExtensionField("key", "field"));
+            assertEquals("\\:key", ext.BuildExtensionField("key"));
+
+            // A non-special custom delimiter needs no escaping.
+            ext = new Extensions('.');
+            assertEquals("field.key", ext.BuildExtensionField("key", "field"));
+            assertEquals(".key", ext.BuildExtensionField("key"));
+        }
+
+        [Test]
+        public void TestSplitExtensionField()
+        {
+            // NOTE(review): this body duplicates TestBuildExtensionField and never
+            // calls a Split API — confirm against the upstream Java testSplitExtensionField.
+            assertEquals("field\\:key", ext.BuildExtensionField("key", "field"));
+            assertEquals("\\:key", ext.BuildExtensionField("key"));
+
+            ext = new Extensions('.');
+            assertEquals("field.key", ext.BuildExtensionField("key", "field"));
+            assertEquals(".key", ext.BuildExtensionField("key"));
+        }
+
+        [Test]
+        public void TestAddGetExtension()
+        {
+            ParserExtension extension = new ExtensionStub();
+            assertNull(ext.GetExtension("foo"));
+            ext.Add("foo", extension);
+            Assert.AreSame(extension, ext.GetExtension("foo"));
+            // Re-adding null clears the registration.
+            ext.Add("foo", null);
+            assertNull(ext.GetExtension("foo"));
+        }
+
+        [Test]
+        public void TestGetExtDelimiter()
+        {
+            assertEquals(Extensions.DEFAULT_EXTENSION_FIELD_DELIMITER, this.ext
+                .ExtensionFieldDelimiter);
+            ext = new Extensions('?');
+            assertEquals('?', this.ext.ExtensionFieldDelimiter);
+        }
+
+        [Test]
+        public void TestEscapeExtension()
+        {
+            assertEquals("abc\\:\\?\\{\\}\\[\\]\\\\\\(\\)\\+\\-\\!\\~", ext
+                .EscapeExtensionField("abc:?{}[]\\()+-!~"));
+            try
+            {
+                ext.EscapeExtensionField(null);
+                fail("should throw NPE - escape string is null");
+            }
+            //catch (NullPointerException e)
+            catch (Exception) // variable removed: exception is expected and unused
+            {
+                // expected: null input is rejected (ArgumentNullException in .NET)
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
new file mode 100644
index 0000000..326ad05
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
@@ -0,0 +1,94 @@
+\ufeff<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{5719FB4F-BF80-40E5-BACC-37E8E18FCA2E}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Lucene.Net.Tests.QueryParser</RootNamespace>
+    <AssemblyName>Lucene.Net.Tests.QueryParser</AssemblyName>
+    <TargetFrameworkVersion>v4.5.1</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>bin\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>pdbonly</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>bin\Release\</OutputPath>
+    <DefineConstants>TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="nunit.framework">
+      <HintPath>..\..\packages\NUnit.2.6.3\lib\nunit.framework.dll</HintPath>
+    </Reference>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+    <Reference Include="System.Xml.Linq" />
+    <Reference Include="System.Data.DataSetExtensions" />
+    <Reference Include="Microsoft.CSharp" />
+    <Reference Include="System.Data" />
+    <Reference Include="System.Xml" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="Analyzing\TestAnalyzingQueryParser.cs" />
+    <Compile Include="Classic\TestMultiFieldQueryParser.cs" />
+    <Compile Include="Classic\TestMultiPhraseQueryParsing.cs" />
+    <Compile Include="Classic\TestQueryParser.cs" />
+    <Compile Include="ComplexPhrase\TestComplexPhraseQuery.cs" />
+    <Compile Include="Ext\ExtensionStub.cs" />
+    <Compile Include="Ext\TestExtendableQueryParser.cs" />
+    <Compile Include="Ext\TestExtensions.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="Classic\TestMultiAnalyzer.cs" />
+    <Compile Include="Simple\TestSimpleQueryParser.cs" />
+    <Compile Include="Surround\Query\BooleanQueryTst.cs" />
+    <Compile Include="Surround\Query\ExceptionQueryTst.cs" />
+    <Compile Include="Surround\Query\SingleFieldTestDb.cs" />
+    <Compile Include="Surround\Query\SrndQueryTest.cs" />
+    <Compile Include="Surround\Query\Test01Exceptions.cs" />
+    <Compile Include="Surround\Query\Test02Boolean.cs" />
+    <Compile Include="Surround\Query\Test03Distance.cs" />
+    <Compile Include="Util\QueryParserTestBase.cs" />
+  </ItemGroup>
+  <ItemGroup>
+    <None Include="packages.config" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj">
+      <Project>{949ba34b-6ae6-4ce3-b578-61e13e4d76bf}</Project>
+      <Name>Lucene.Net.QueryParser</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\Lucene.Net.Analysis.Common\Lucene.Net.Analysis.Common.csproj">
+      <Project>{4add0bbc-b900-4715-9526-d871de8eea64}</Project>
+      <Name>Lucene.Net.Analysis.Common</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\Lucene.Net.Core\Lucene.Net.csproj">
+      <Project>{5d4ad9be-1ffb-41ab-9943-25737971bf57}</Project>
+      <Name>Lucene.Net</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\Lucene.Net.TestFramework\Lucene.Net.TestFramework.csproj">
+      <Project>{b2c0d749-ce34-4f62-a15e-00cb2ff5ddb3}</Project>
+      <Name>Lucene.Net.TestFramework</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs b/src/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..549c7bf
--- /dev/null
+++ b/src/Lucene.Net.Tests.QueryParser/Properties/AssemblyInfo.cs
@@ -0,0 +1,36 @@
+\ufeffusing System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following 
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("Lucene.Net.Tests.QueryParser")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("Lucene.Net.Tests.QueryParser")]
+[assembly: AssemblyCopyright("Copyright ©  2016")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible 
+// to COM components.  If you need to access a type in this assembly from 
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("27d0ae76-3e51-454c-9c4a-f913fde0ed0a")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version 
+//      Build Number
+//      Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers 
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]


[15/50] [abbrv] lucenenet git commit: Ported QueryParser.Surround namespace + tests.

Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs b/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
new file mode 100644
index 0000000..6cddb9c
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/SpanNearClauseFactory.cs
@@ -0,0 +1,93 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search.Spans;
+using Lucene.Net.Support;
+using System;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Factory for <see cref="SpanOrQuery"/>
+    /// </summary>
+    public class SpanNearClauseFactory
+    {
+        /// <summary>
+        /// Creates a factory that collects weighted <see cref="SpanQuery"/> clauses
+        /// for <paramref name="fieldName"/> and combines them via <see cref="MakeSpanClause"/>.
+        /// </summary>
+        public SpanNearClauseFactory(IndexReader reader, string fieldName, BasicQueryFactory qf)
+        {
+            this.reader = reader;
+            this.fieldName = fieldName;
+            this.weightBySpanQuery = new HashMap<SpanQuery, float>();
+            this.qf = qf;
+        }
+
+        // Assigned only in the constructor.
+        private readonly IndexReader reader;
+        private readonly string fieldName;
+        private readonly IDictionary<SpanQuery, float> weightBySpanQuery;
+        private readonly BasicQueryFactory qf;
+
+        public virtual IndexReader IndexReader { get { return reader; } }
+
+        public virtual string FieldName { get { return fieldName; } }
+
+        public virtual BasicQueryFactory BasicQueryFactory { get { return qf; } }
+
+        /// <summary>Number of distinct clauses collected so far.</summary>
+        public virtual int Count { get { return weightBySpanQuery.Count; } }
+
+        public virtual void Clear() { weightBySpanQuery.Clear(); }
+
+        protected virtual void AddSpanQueryWeighted(SpanQuery sq, float weight)
+        {
+            // Accumulate the per-query weight with a single dictionary lookup
+            // (TryGetValue) instead of ContainsKey followed by the indexer.
+            float w;
+            if (weightBySpanQuery.TryGetValue(sq, out w))
+                w += weight;
+            else
+                w = weight;
+            weightBySpanQuery[sq] = w;
+        }
+
+        public virtual void AddTermWeighted(Term t, float weight)
+        {
+            SpanTermQuery stq = qf.NewSpanTermQuery(t);
+            /* CHECKME: wrap in Hashable...? */
+            AddSpanQueryWeighted(stq, weight);
+        }
+
+        /// <summary>
+        /// Adds <paramref name="q"/> (which must be a <see cref="SpanQuery"/>) with
+        /// its boost as weight; the canonical empty query is silently ignored.
+        /// </summary>
+        public virtual void AddSpanQuery(Search.Query q)
+        {
+            if (q == SrndQuery.TheEmptyLcnQuery)
+                return;
+            if (!(q is SpanQuery))
+                throw new InvalidOperationException("Expected SpanQuery: " + q.ToString(FieldName));
+            AddSpanQueryWeighted((SpanQuery)q, q.Boost);
+        }
+
+        /// <summary>
+        /// Combines the collected clauses: a single clause is returned as-is,
+        /// otherwise they are wrapped in a <see cref="SpanOrQuery"/>.
+        /// </summary>
+        public SpanQuery MakeSpanClause()
+        {
+            List<SpanQuery> spanQueries = new List<SpanQuery>();
+            foreach (var wsq in weightBySpanQuery)
+            {
+                wsq.Key.Boost = wsq.Value;
+                spanQueries.Add(wsq.Key);
+            }
+            if (spanQueries.Count == 1)
+                return spanQueries[0];
+            else
+                return new SpanOrQuery(spanQueries.ToArray());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
new file mode 100644
index 0000000..7a1a8b3
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/SrndBooleanQuery.cs
@@ -0,0 +1,51 @@
+\ufeffusing Lucene.Net.Search;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Helpers for combining two or more surround subqueries into a
+    /// <see cref="BooleanQuery"/> with a single occurrence type.
+    /// </summary>
+    public static class SrndBooleanQuery
+    {
+        /// <summary>
+        /// Adds each query in <paramref name="queries"/> to <paramref name="bq"/>
+        /// with the given <paramref name="occur"/>.
+        /// </summary>
+        public static void AddQueriesToBoolean(
+            BooleanQuery bq,
+            IEnumerable<Search.Query> queries,
+            BooleanClause.Occur occur)
+        {
+            foreach (var query in queries)
+            {
+                bq.Add(query, occur);
+            }
+        }
+
+        /// <summary>
+        /// Builds a <see cref="BooleanQuery"/> from the given subqueries.
+        /// </summary>
+        /// <exception cref="InvalidOperationException">
+        /// When fewer than two subqueries are supplied.
+        /// </exception>
+        public static Search.Query MakeBooleanQuery(
+            IEnumerable<Search.Query> queries,
+            BooleanClause.Occur occur)
+        {
+            // Materialize once: the original called Count() twice and then
+            // enumerated again in AddQueriesToBoolean — up to three passes over
+            // the sequence, which is wasteful and incorrect for one-shot
+            // (deferred) enumerables.
+            var queryList = queries.ToList();
+            if (queryList.Count <= 1)
+            {
+                throw new InvalidOperationException("Too few subqueries: " + queryList.Count);
+            }
+            BooleanQuery bq = new BooleanQuery();
+            AddQueriesToBoolean(bq, queryList, occur);
+            return bq;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
new file mode 100644
index 0000000..4044b09
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/SrndPrefixQuery.cs
@@ -0,0 +1,108 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Util;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Query that matches string prefixes, e.g. <c>abc*</c>. Matching terms are
+    /// enumerated directly from the index via <see cref="VisitMatchingTerms"/>.
+    /// </summary>
+    public class SrndPrefixQuery : SimpleTerm
+    {
+        // UTF-8 form of the prefix, precomputed once for term comparisons.
+        private readonly BytesRef prefixRef;
+
+        /// <param name="prefix">The literal prefix to match.</param>
+        /// <param name="quoted">Whether the term appeared quoted in the query text.</param>
+        /// <param name="truncator">The truncation character (e.g. '*') used when printing.</param>
+        public SrndPrefixQuery(string prefix, bool quoted, char truncator)
+            : base(quoted)
+        {
+            this.prefix = prefix;
+            prefixRef = new BytesRef(prefix);
+            this.truncator = truncator;
+        }
+
+        private readonly string prefix;
+        public virtual string Prefix { get { return prefix; } }
+
+        private readonly char truncator;
+        public virtual char SuffixOperator { get { return truncator; } }
+
+        /// <summary>Creates a <see cref="Term"/> for the bare prefix in the given field.</summary>
+        public virtual Term GetLucenePrefixTerm(string fieldName)
+        {
+            return new Term(fieldName, Prefix);
+        }
+
+        public override string ToStringUnquoted()
+        {
+            return Prefix;
+        }
+
+        protected override void SuffixToString(StringBuilder r)
+        {
+            r.Append(SuffixOperator);
+        }
+
+        /// <summary>
+        /// Visits every indexed term in <paramref name="fieldName"/> that starts
+        /// with the prefix. Seeks to the prefix, handles the three seek outcomes,
+        /// then walks forward until a term no longer carries the prefix.
+        /// </summary>
+        public override void VisitMatchingTerms(IndexReader reader, string fieldName, IMatchingTermVisitor mtv)
+        {
+            /* inspired by PrefixQuery.rewrite(): */
+            Terms terms = MultiFields.GetTerms(reader, fieldName);
+            if (terms != null)
+            {
+                TermsEnum termsEnum = terms.Iterator(null);
+
+                bool skip = false;
+                TermsEnum.SeekStatus status = termsEnum.SeekCeil(new BytesRef(Prefix));
+                if (status == TermsEnum.SeekStatus.FOUND)
+                {
+                    // Exact prefix exists as a term itself; visit it first.
+                    mtv.VisitMatchingTerm(GetLucenePrefixTerm(fieldName));
+                }
+                else if (status == TermsEnum.SeekStatus.NOT_FOUND)
+                {
+                    // Positioned on the smallest term >= prefix; it matches only
+                    // if it actually starts with the prefix.
+                    if (StringHelper.StartsWith(termsEnum.Term(), prefixRef))
+                    {
+                        mtv.VisitMatchingTerm(new Term(fieldName, termsEnum.Term().Utf8ToString()));
+                    }
+                    else
+                    {
+                        skip = true;
+                    }
+                }
+                else
+                {
+                    // EOF
+                    skip = true;
+                }
+
+                if (!skip)
+                {
+                    // Continue through consecutive terms sharing the prefix.
+                    while (true)
+                    {
+                        BytesRef text = termsEnum.Next();
+                        if (text != null && StringHelper.StartsWith(text, prefixRef))
+                        {
+                            mtv.VisitMatchingTerm(new Term(fieldName, text.Utf8ToString()));
+                        }
+                        else
+                        {
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
new file mode 100644
index 0000000..57b19cc
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/SrndQuery.cs
@@ -0,0 +1,149 @@
+\ufeffusing Lucene.Net.Search;
+using Lucene.Net.Support;
+using System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Lowest level base class for surround queries. Carries an optional weight
+    /// (parsed from the query text) that is multiplied into the Lucene query's
+    /// boost when the query is converted.
+    /// </summary>
+    public abstract class SrndQuery : ICloneable
+    {
+        //public SrndQuery() { }
+
+        private float weight = (float)1.0;
+        private bool weighted = false;
+
+        /// <summary>Whether a weight was explicitly assigned via <see cref="Weight"/>.</summary>
+        public virtual bool IsWeighted { get { return weighted; } }
+
+        /// <summary>
+        /// The query weight. Assigning a value also marks this query as weighted.
+        /// </summary>
+        public virtual float Weight 
+        { 
+            get { return weight; }
+            set
+            {
+                weight = value; /* as parsed from the query text */
+                weighted = true;
+            }
+        }
+
+        public virtual string WeightString { get { return Number.ToString(Weight); } }
+
+        public virtual string WeightOperator { get { return "^"; } }
+
+
+        protected virtual void WeightToString(StringBuilder r)
+        { 
+            /* append the weight part of a query */
+            if (IsWeighted)
+            {
+                r.Append(WeightOperator);
+                r.Append(WeightString);
+            }
+        }
+
+        /// <summary>
+        /// Converts this surround query to a Lucene query for the given field,
+        /// folding the parsed weight (if any) into the resulting query's boost.
+        /// </summary>
+        public virtual Search.Query MakeLuceneQueryField(string fieldName, BasicQueryFactory qf)
+        {
+            Search.Query q = MakeLuceneQueryFieldNoBoost(fieldName, qf);
+            if (IsWeighted)
+            {
+                q.Boost=(Weight * q.Boost); /* weight may be at any level in a SrndQuery */
+            }
+            return q;
+        }
+
+        /// <summary>Conversion without applying this query's weight; implemented by subclasses.</summary>
+        public abstract Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf);
+
+        /// <summary>
+        /// This method is used by <see cref="M:GetHashCode()"/> and <see cref="M:Equals(Object)"/>,
+        /// see LUCENE-2945.
+        /// </summary>
+        /// <returns>The full textual form of this query.</returns>
+        public abstract override string ToString();
+
+        public virtual bool IsFieldsSubQueryAcceptable { get { return true; } }
+
+        /// <summary> Shallow clone. Subclasses must override this if they
+        /// need to clone any members deeply,
+        /// </summary>
+        public virtual object Clone()
+        {
+            object clone = null;
+            try
+            {
+                clone = base.MemberwiseClone();
+            }
+            catch (Exception e)
+            {
+                // NOTE(review): MemberwiseClone does not throw; this catch is a
+                // carry-over from Java's CloneNotSupportedException handling —
+                // presumably removable, confirm before simplifying.
+                throw new SystemException(e.Message, e); // shouldn't happen
+            }
+            return clone;
+        }
+
+        /// <summary>
+        /// Hash code derived from the concrete type and <see cref="M:ToString()"/>.
+        /// For subclasses of <see cref="SrndQuery"/> within the package
+        /// {@link org.apache.lucene.queryparser.surround.query}
+        /// it is not necessary to override this method, <see cref="M:ToString()"/>
+        /// </summary>
+        public override int GetHashCode()
+        {
+            return GetType().GetHashCode() ^ ToString().GetHashCode();
+        }
+
+        /// <summary>
+        /// Equality is exact-type plus identical <see cref="M:ToString()"/> output.
+        /// For subclasses of <see cref="SrndQuery"/> within the package
+        /// {@link org.apache.lucene.queryparser.surround.query}
+        /// it is not necessary to override this method,
+        /// @see #toString()
+        /// </summary>
+        /// <param name="obj">The object to compare against.</param>
+        /// <returns>True when <paramref name="obj"/> has the same type and text.</returns>
+        public override bool Equals(object obj)
+        {
+            if (obj == null)
+                return false;
+            if (!GetType().Equals(obj.GetType()))
+                return false;
+            return ToString().Equals(obj.ToString());
+        }
+
+        /// <summary> An empty Lucene query; shared immutable sentinel — every
+        /// mutating member of the nested type throws. </summary>
+        public readonly static Search.Query TheEmptyLcnQuery = new EmptyLcnQuery(); /* no changes allowed */ 
+  
+        internal sealed class EmptyLcnQuery : BooleanQuery
+        {
+            public override float Boost
+            {
+                get { return base.Boost; }
+                set { throw new NotSupportedException(); }
+            }
+
+            public override void Add(BooleanClause clause)
+            {
+                throw new NotSupportedException();
+            }
+
+            public override void Add(Search.Query query, BooleanClause.Occur occur)
+            {
+                throw new NotSupportedException();
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
new file mode 100644
index 0000000..45885a1
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/SrndTermQuery.cs
@@ -0,0 +1,63 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Util;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Simple single-term clause. Visits the term only if it is actually
+    /// present in the index, for symmetry with the other SimpleTerm subclasses.
+    /// </summary>
+    public class SrndTermQuery : SimpleTerm
+    {
+        /// <param name="termText">The literal term text.</param>
+        /// <param name="quoted">Whether the term appeared quoted in the query text.</param>
+        public SrndTermQuery(string termText, bool quoted)
+            : base(quoted)
+        {
+            this.termText = termText;
+        }
+
+        private readonly string termText;
+        public virtual string TermText { get { return termText; } }
+
+        /// <summary>Creates the <see cref="Term"/> for this text in the given field.</summary>
+        public virtual Term GetLuceneTerm(string fieldName)
+        {
+            return new Term(fieldName, TermText);
+        }
+
+        public override string ToStringUnquoted()
+        {
+            return TermText;
+        }
+
+        /// <summary>
+        /// Visits this term only when an exact match exists in the index
+        /// (SeekCeil returns FOUND); otherwise nothing is visited.
+        /// </summary>
+        public override void VisitMatchingTerms(IndexReader reader, string fieldName, IMatchingTermVisitor mtv)
+        {
+            /* check term presence in index here for symmetry with other SimpleTerm's */
+            Terms terms = MultiFields.GetTerms(reader, fieldName);
+            if (terms != null)
+            {
+                TermsEnum termsEnum = terms.Iterator(null);
+
+                TermsEnum.SeekStatus status = termsEnum.SeekCeil(new BytesRef(TermText));
+                if (status == TermsEnum.SeekStatus.FOUND)
+                {
+                    mtv.VisitMatchingTerm(GetLuceneTerm(fieldName));
+                }
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs b/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
new file mode 100644
index 0000000..5ed9ff3
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/SrndTruncQuery.cs
@@ -0,0 +1,139 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Util;
+using System;
+using System.Text;
+using System.Text.RegularExpressions;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Truncated-term clause supporting a multi-character wildcard
+    /// (<paramref name="unlimited"/>, e.g. '*') and a single-character mask
+    /// (<paramref name="mask"/>, e.g. '?'). The literal leading part becomes a
+    /// prefix for index seeking; the remainder is compiled to a regex.
+    /// </summary>
+    public class SrndTruncQuery : SimpleTerm
+    {
+        public SrndTruncQuery(string truncated, char unlimited, char mask)
+            : base(false) /* not quoted */
+        {
+            this.truncated = truncated;
+            this.unlimited = unlimited;
+            this.mask = mask;
+            TruncatedToPrefixAndPattern();
+        }
+
+        private readonly string truncated;
+        private readonly char unlimited;
+        private readonly char mask;
+
+        // Derived by TruncatedToPrefixAndPattern(): literal prefix, its UTF-8
+        // form, and the regex for the truncated remainder.
+        private string prefix;
+        private BytesRef prefixRef;
+        private Regex pattern;
+
+        public virtual string Truncated { get { return truncated; } }
+
+        public override string ToStringUnquoted()
+        {
+            return Truncated;
+        }
+
+        /// <summary>True when <paramref name="c"/> is a literal character (not a wildcard or mask).</summary>
+        protected virtual bool MatchingChar(char c)
+        {
+            return (c != unlimited) && (c != mask);
+        }
+
+        /// <summary>
+        /// Appends the regex fragment for one query character: wildcard → ".*",
+        /// mask → ".", otherwise the character itself.
+        /// NOTE(review): literal regex metacharacters (e.g. '.', '+') are appended
+        /// unescaped — presumably excluded earlier by the parser; confirm.
+        /// </summary>
+        protected virtual void AppendRegExpForChar(char c, StringBuilder re)
+        {
+            if (c == unlimited)
+                re.Append(".*");
+            else if (c == mask)
+                re.Append(".");
+            else
+                re.Append(c);
+        }
+
+        /// <summary>
+        /// Splits the truncated text into the longest literal prefix and a
+        /// compiled regex covering everything from the first wildcard/mask on.
+        /// </summary>
+        protected virtual void TruncatedToPrefixAndPattern()
+        {
+            int i = 0;
+            while ((i < truncated.Length) && MatchingChar(truncated[i]))
+            {
+                i++;
+            }
+            prefix = truncated.Substring(0, i);
+            prefixRef = new BytesRef(prefix);
+
+            StringBuilder re = new StringBuilder();
+            while (i < truncated.Length)
+            {
+                AppendRegExpForChar(truncated[i], re);
+                i++;
+            }
+            pattern = new Regex(re.ToString(), RegexOptions.Compiled);
+        }
+
+        // TODO: Finish implementation
+        public override void VisitMatchingTerms(IndexReader reader, string fieldName, SimpleTerm.IMatchingTermVisitor mtv)
+        {
+            throw new NotImplementedException("Need to translate this from Java's whacky RegEx syntax");
+            //int prefixLength = prefix.Length;
+            //Terms terms = MultiFields.GetTerms(reader, fieldName);
+            //if (terms != null)
+            //{
+            //    MatchCollection matcher = pattern.Matches("");
+            //    try
+            //    {
+            //        TermsEnum termsEnum = terms.Iterator(null);
+
+            //        TermsEnum.SeekStatus status = termsEnum.SeekCeil(prefixRef);
+            //        BytesRef text;
+            //        if (status == TermsEnum.SeekStatus.FOUND)
+            //        {
+            //            text = prefixRef;
+            //        }
+            //        else if (status == TermsEnum.SeekStatus.NOT_FOUND)
+            //        {
+            //            text = termsEnum.Term();
+            //        }
+            //        else
+            //        {
+            //            text = null;
+            //        }
+
+            //        while (text != null)
+            //        {
+            //            if (text != null && StringHelper.StartsWith(text, prefixRef))
+            //            {
+            //                string textString = text.Utf8ToString();
+            //                matcher.Reset(textString.Substring(prefixLength));
+            //                if (matcher.Success)
+            //                {
+            //                    mtv.VisitMatchingTerm(new Term(fieldName, textString));
+            //                }
+            //            }
+            //            else
+            //            {
+            //                break;
+            //            }
+            //            text = termsEnum.Next();
+            //        }
+            //    }
+            //    finally
+            //    {
+            //        matcher.Reset();
+            //    }
+            //}
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs b/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
new file mode 100644
index 0000000..27f313c
--- /dev/null
+++ b/Lucene.Net.QueryParser/Surround/Query/TooManyBasicQueries.cs
@@ -0,0 +1,30 @@
+\ufeffnamespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Exception thrown when <see cref="BasicQueryFactory"/> would exceed the limit
+    /// of query clauses.
+    /// </summary>
+    public class TooManyBasicQueries : System.IO.IOException
+    {
+        /// <summary>Initializes the exception with a message naming the exceeded limit.</summary>
+        /// <param name="maxBasicQueries">The configured maximum number of basic queries.</param>
+        public TooManyBasicQueries(int maxBasicQueries)
+            : base("Exceeded maximum of " + maxBasicQueries + " basic queries.")
+        { }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
index bcf9568..2094270 100644
--- a/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
+++ b/Lucene.Net.Tests.QueryParser/Lucene.Net.Tests.QueryParser.csproj
@@ -54,6 +54,13 @@
     <Compile Include="Properties\AssemblyInfo.cs" />
     <Compile Include="Classic\TestMultiAnalyzer.cs" />
     <Compile Include="Simple\TestSimpleQueryParser.cs" />
+    <Compile Include="Surround\Query\BooleanQueryTst.cs" />
+    <Compile Include="Surround\Query\ExceptionQueryTst.cs" />
+    <Compile Include="Surround\Query\SingleFieldTestDb.cs" />
+    <Compile Include="Surround\Query\SrndQueryTest.cs" />
+    <Compile Include="Surround\Query\Test01Exceptions.cs" />
+    <Compile Include="Surround\Query\Test02Boolean.cs" />
+    <Compile Include="Surround\Query\Test03Distance.cs" />
     <Compile Include="Util\QueryParserTestBase.cs" />
   </ItemGroup>
   <ItemGroup>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
new file mode 100644
index 0000000..6f7fcfc
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Surround/Query/BooleanQueryTst.cs
@@ -0,0 +1,142 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Test helper: parses a surround query, runs it against a
+    /// <see cref="SingleFieldTestDb"/>, and asserts that exactly the expected
+    /// document numbers are hit.
+    /// </summary>
+    public class BooleanQueryTst
+    {
+        private string queryText;
+        private readonly int[] expectedDocNrs;
+        private SingleFieldTestDb dBase;
+        private string fieldName;
+        // NOTE(review): stored but never read in this class; retained from the
+        // Java port where the enclosing TestCase was passed in — confirm removable.
+        private Assert testCase;
+        private BasicQueryFactory qf;
+        private bool verbose = true;
+
+        public BooleanQueryTst(
+            string queryText,
+            int[] expectedDocNrs,
+            SingleFieldTestDb dBase,
+            string fieldName,
+            Assert testCase,
+            BasicQueryFactory qf)
+        {
+            this.queryText = queryText;
+            this.expectedDocNrs = expectedDocNrs;
+            this.dBase = dBase;
+            this.fieldName = fieldName;
+            this.testCase = testCase;
+            this.qf = qf;
+        }
+
+        // NOTE(review): set-only property; a get accessor (or a method) would be
+        // more conventional.
+        public virtual bool Verbose { set { this.verbose = value; } }
+
+        public virtual string QueryText { get { return this.queryText; } }
+
+        public virtual int[] ExpectedDocNrs { get { return this.expectedDocNrs; } }
+
+        /// <summary>
+        /// Collector that asserts every hit has a positive score and is one of the
+        /// expected doc numbers, tracking duplicates via the encountered array.
+        /// </summary>
+        internal class TestCollector : Collector
+        { // FIXME: use check hits from Lucene tests
+            private int totalMatched;
+            private bool[] encountered;
+            private Scorer scorer = null;
+            private int docBase = 0;
+            private BooleanQueryTst parent;
+
+            public TestCollector(BooleanQueryTst parent)
+            {
+                totalMatched = 0;
+                encountered = new bool[parent.expectedDocNrs.Length];
+                this.parent = parent;
+            }
+
+            public override Scorer Scorer
+            {
+                set { this.scorer = value; }
+            }
+
+            public override bool AcceptsDocsOutOfOrder()
+            {
+                return true;
+            }
+
+            public override AtomicReaderContext NextReader
+            {
+                set { docBase = value.DocBase; }
+            }
+
+            public override void Collect(int docNr)
+            {
+                float score = scorer.Score();
+                // Translate segment-relative doc number to a global one.
+                docNr += docBase;
+                /* System.out.println(docNr + " '" + dBase.getDocs()[docNr] + "': " + score); */
+                Assert.True(score > 0.0, parent.QueryText + ": positive score");
+                Assert.True(totalMatched < parent.ExpectedDocNrs.Length, parent.QueryText + ": too many hits");
+                int i;
+                for (i = 0; i < parent.expectedDocNrs.Length; i++)
+                {
+                    // Each expected doc number may be matched at most once.
+                    if ((!encountered[i]) && (parent.ExpectedDocNrs[i] == docNr))
+                    {
+                        encountered[i] = true;
+                        break;
+                    }
+                }
+                if (i == parent.ExpectedDocNrs.Length)
+                {
+                    Assert.True(false, parent.QueryText + ": doc nr for hit not expected: " + docNr);
+                }
+                totalMatched++;
+            }
+
+            /// <summary>Asserts the total number of hits equals the number expected.</summary>
+            public void CheckNrHits()
+            {
+                Assert.AreEqual(parent.ExpectedDocNrs.Length, totalMatched, parent.QueryText + ": nr of hits");
+            }
+        }
+
+        /// <summary>
+        /// Parses the query text, converts it to a Lucene query for the test
+        /// field, runs the search, and checks the collected hits.
+        /// </summary>
+        public void DoTest()
+        {
+
+            if (verbose)
+            {
+                Console.WriteLine("");
+                Console.WriteLine("Query: " + queryText);
+            }
+
+            SrndQuery lq = Parser.QueryParser.Parse(queryText);
+
+            /* if (verbose) System.out.println("Srnd: " + lq.toString()); */
+
+            Search.Query query = lq.MakeLuceneQueryField(fieldName, qf);
+            /* if (verbose) System.out.println("Lucene: " + query.toString()); */
+
+            TestCollector tc = new TestCollector(this);
+            using (IndexReader reader = DirectoryReader.Open(dBase.Db))
+            {
+                IndexSearcher searcher = new IndexSearcher(reader);
+
+                searcher.Search(query, tc);
+            }
+            tc.CheckNrHits();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
new file mode 100644
index 0000000..7468ef9
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Surround/Query/ExceptionQueryTst.cs
@@ -0,0 +1,76 @@
+\ufeffusing Lucene.Net.QueryParser.Surround.Parser;
+using System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Test helper: parses a surround query that is expected to be invalid.
+    /// A <see cref="ParseException"/> counts as a pass; when parsing succeeds,
+    /// the query text and its parsed form are recorded as a failure.
+    /// </summary>
+    public class ExceptionQueryTst
+    {
+        private string queryText;
+        private bool verbose;
+
+        public ExceptionQueryTst(string queryText, bool verbose)
+        {
+            this.queryText = queryText;
+            this.verbose = verbose;
+        }
+
+        /// <summary>
+        /// Attempts to parse the query; appends a failure description to
+        /// <paramref name="failQueries"/> when no <see cref="ParseException"/> is thrown.
+        /// </summary>
+        public void DoTest(StringBuilder failQueries)
+        {
+            bool pass = false;
+            SrndQuery lq = null;
+            try
+            {
+                lq = Parser.QueryParser.Parse(queryText);
+                if (verbose)
+                {
+                    Console.WriteLine("Query: " + queryText + "\nParsed as: " + lq.ToString());
+                }
+            }
+            catch (ParseException e)
+            {
+                if (verbose)
+                {
+                    Console.WriteLine("Parse exception for query:\n"
+                                      + queryText + "\n"
+                                      + e.Message);
+                }
+                pass = true;
+            }
+            if (!pass)
+            {
+                // Fixed: the port used Java-style lowercase append()/toString(),
+                // which do not exist on System.Text.StringBuilder/object in C#.
+                failQueries.Append(queryText);
+                failQueries.Append("\nParsed as: ");
+                failQueries.Append(lq.ToString());
+                failQueries.Append("\n");
+            }
+        }
+
+        /// <summary>
+        /// Runs <see cref="DoTest"/> for each query and returns the concatenated
+        /// descriptions of those that failed to throw (empty string when all pass).
+        /// </summary>
+        public static string GetFailQueries(string[] exceptionQueries, bool verbose)
+        {
+            StringBuilder failQueries = new StringBuilder();
+            for (int i = 0; i < exceptionQueries.Length; i++)
+            {
+                new ExceptionQueryTst(exceptionQueries[i], verbose).DoTest(failQueries);
+            }
+            return failQueries.ToString();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
new file mode 100644
index 0000000..1221835
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Surround/Query/SingleFieldTestDb.cs
@@ -0,0 +1,55 @@
+\ufeffusing Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Test helper that builds an in-memory index over a single field,
+    /// indexing one document per entry of <c>documents</c>. The index is
+    /// written eagerly in the constructor and is ready to search afterwards.
+    /// </summary>
+    public class SingleFieldTestDb
+    {
+        private Directory db;       // mock-wrapped RAM directory holding the index
+        private string[] docs;      // the raw document contents that were indexed
+        private string fieldName;   // the single field all documents are indexed under
+
+        /// <summary>
+        /// Indexes <paramref name="documents"/> (document number = array index)
+        /// into a new <see cref="MockDirectoryWrapper"/> under field <paramref name="fName"/>.
+        /// </summary>
+        /// <param name="random">Randomness source for the mock directory wrapper.</param>
+        /// <param name="documents">One entry per document; each becomes a single TextField.</param>
+        /// <param name="fName">Name of the field the documents are indexed under.</param>
+        public SingleFieldTestDb(Random random, string[] documents, string fName)
+        {
+            db = new MockDirectoryWrapper(random, new RAMDirectory());
+            docs = documents;
+            fieldName = fName;
+            // The using block disposes (and thereby commits/closes) the writer
+            // before the constructor returns, so readers can open db immediately.
+            using (IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(
+                LuceneVersion.LUCENE_CURRENT,
+                new MockAnalyzer(random))))
+            {
+                for (int j = 0; j < docs.Length; j++)
+                {
+                    Document d = new Document();
+                    d.Add(new TextField(fieldName, docs[j], Field.Store.NO));
+                    writer.AddDocument(d);
+                }
+            }
+        }
+
+        // NOTE(review): "Fieldname" casing is unconventional (expected "FieldName"),
+        // but it is part of the public surface, so it is documented rather than renamed.
+        public Directory Db { get { return db; } }
+        public string[] Docs { get { return docs; } }
+        public string Fieldname { get { return fieldName; } }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
new file mode 100644
index 0000000..ebe7e2b
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Surround/Query/SrndQueryTest.cs
@@ -0,0 +1,48 @@
+\ufeffusing Lucene.Net.Search;
+using Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Verifies that equivalent surround query strings (differing only in
+    /// whitespace) parse to Lucene queries that satisfy the equals/hashCode
+    /// contract, via <see cref="QueryUtils.CheckEqual"/>.
+    /// </summary>
+    [TestFixture]
+    public class SrndQueryTest : LuceneTestCase
+    {
+        /// <summary>
+        /// Parses both strings against the same field and asserts the resulting
+        /// Lucene queries are equal (and hash equal).
+        /// </summary>
+        private void CheckEqualParsings(string s1, string s2)
+        {
+            string fieldName = "foo";
+            BasicQueryFactory qf = new BasicQueryFactory(16);
+            Search.Query lq1, lq2;
+            lq1 = Parser.QueryParser.Parse(s1).MakeLuceneQueryField(fieldName, qf);
+            lq2 = Parser.QueryParser.Parse(s2).MakeLuceneQueryField(fieldName, qf);
+            QueryUtils.CheckEqual(lq1, lq2);
+        }
+
+        [Test]
+        public void TestHashEquals()
+        {
+            // Grab some sample queries from Test02Boolean and Test03Distance and
+            // check their hashes and Equals implementations.
+            CheckEqualParsings("word1 w word2", " word1  w  word2 ");
+            CheckEqualParsings("2N(w1,w2,w3)", " 2N(w1, w2 , w3)");
+            CheckEqualParsings("abc?", " abc? ");
+            CheckEqualParsings("w*rd?", " w*rd?");
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
new file mode 100644
index 0000000..6ebc87a
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Surround/Query/Test01Exceptions.cs
@@ -0,0 +1,72 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Verifies that each malformed surround query produces a ParseException.
+    /// The trailing underscore in the class name avoids a collision with the
+    /// <see cref="Test01Exceptions"/> test method below.
+    /// </summary>
+    [TestFixture]
+    public class Test01Exceptions_ : LuceneTestCase
+    {
+        /** Main for running test case by itself. */
+        //public static void Main(string[] args)
+        //{
+        //    TestRunner.run(new TestSuite(Test01Exceptions.class));
+        //}
+
+        private bool verbose = false; /* to show actual parsing error messages */
+        // NOTE(review): fieldName appears unused within this fixture — confirm
+        // whether it can be removed or is kept for parity with the Java original.
+        private readonly string fieldName = "bi";
+
+        // Queries that must all fail to parse. Note "a?" appears twice; the
+        // duplicate is carried over from the upstream Java test and is harmless.
+        string[] exceptionQueries = {
+            "*",
+            "a*",
+            "ab*",
+            "?",
+            "a?",
+            "ab?",
+            "a???b",
+            "a?",
+            "a*b?",
+            "word1 word2",
+            "word2 AND",
+            "word1 OR",
+            "AND(word2)",
+            "AND(word2,)",
+            "AND(word2,word1,)",
+            "OR(word2)",
+            "OR(word2 ,",
+            "OR(word2 , word1 ,)",
+            "xx NOT",
+            "xx (a AND b)",
+            "(a AND b",
+            "a OR b)",
+            "or(word2+ not ord+, and xyz,def)",
+            ""
+        };
+
+        [Test]
+        public void Test01Exceptions()
+        {
+            // GetFailQueries returns a non-empty report listing any query that
+            // unexpectedly parsed without throwing.
+            string m = ExceptionQueryTst.GetFailQueries(exceptionQueries, verbose);
+            if (m.Length > 0)
+            {
+                fail("No ParseException for:\n" + m);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
new file mode 100644
index 0000000..aef9279
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Surround/Query/Test02Boolean.cs
@@ -0,0 +1,178 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Tests of surround query boolean operators (AND, OR, NOT) and truncated
+    /// terms (* and ?) against a small single-field in-memory index. Each
+    /// expdnrs array lists the document numbers expected to match the query.
+    /// </summary>
+    [TestFixture]
+    public class Test02Boolean : LuceneTestCase
+    {
+        //public static void Main(string[] args) {
+        //    TestRunner.run(new TestSuite(Test02Boolean.class));
+        //}
+
+        private readonly string fieldName = "bi";
+        private bool verbose = false;     // set true to print parse/search diagnostics
+        private int maxBasicQueries = 16; // cap for BasicQueryFactory query expansion
+
+        // Indexed corpus; document number = array index.
+        string[] docs1 = {
+            "word1 word2 word3",
+            "word4 word5",
+            "ord1 ord2 ord3",
+            "orda1 orda2 orda3 word2 worda3",
+            "a c e a b c"
+        };
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            // Rebuilt for every test so fixtures stay independent.
+            db1 = new SingleFieldTestDb(Random(), docs1, fieldName);
+        }
+
+        private SingleFieldTestDb db1;
+
+
+        /// <summary>
+        /// Parses and runs <paramref name="query"/> against db1 and asserts it
+        /// matches exactly the document numbers in <paramref name="expdnrs"/>.
+        /// </summary>
+        public void NormalTest1(String query, int[] expdnrs)
+        {
+            BooleanQueryTst bqt = new BooleanQueryTst(query, expdnrs, db1, fieldName, this,
+                                                        new BasicQueryFactory(maxBasicQueries));
+            bqt.Verbose = (verbose);
+            bqt.DoTest();
+        }
+
+        [Test]
+        public void Test02Terms01()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("word1", expdnrs);
+        }
+        [Test]
+        public void Test02Terms02()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("word*", expdnrs);
+        }
+        [Test]
+        public void Test02Terms03()
+        {
+            int[] expdnrs = { 2 }; NormalTest1("ord2", expdnrs);
+        }
+        [Test]
+        public void Test02Terms04()
+        {
+            int[] expdnrs = { }; NormalTest1("kxork*", expdnrs);
+        }
+        [Test]
+        public void Test02Terms05()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("wor*", expdnrs);
+        }
+        [Test]
+        public void Test02Terms06()
+        {
+            int[] expdnrs = { }; NormalTest1("ab", expdnrs);
+        }
+
+        [Test]
+        public void Test02Terms10()
+        {
+            int[] expdnrs = { }; NormalTest1("abc?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms13()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("word?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms14()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("w?rd?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms20()
+        {
+            int[] expdnrs = { 0, 1, 3 }; NormalTest1("w*rd?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms21()
+        {
+            int[] expdnrs = { 3 }; NormalTest1("w*rd??", expdnrs);
+        }
+        [Test]
+        public void Test02Terms22()
+        {
+            int[] expdnrs = { 3 }; NormalTest1("w*?da?", expdnrs);
+        }
+        [Test]
+        public void Test02Terms23()
+        {
+            int[] expdnrs = { }; NormalTest1("w?da?", expdnrs);
+        }
+
+        [Test]
+        public void Test03And01()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("word1 AND word2", expdnrs);
+        }
+        [Test]
+        public void Test03And02()
+        {
+            int[] expdnrs = { 3 }; NormalTest1("word* and ord*", expdnrs);
+        }
+        [Test]
+        public void Test03And03()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("and(word1,word2)", expdnrs);
+        }
+        [Test]
+        public void Test04Or01()
+        {
+            int[] expdnrs = { 0, 3 }; NormalTest1("word1 or word2", expdnrs);
+        }
+        [Test]
+        public void Test04Or02()
+        {
+            int[] expdnrs = { 0, 1, 2, 3 }; NormalTest1("word* OR ord*", expdnrs);
+        }
+        [Test]
+        public void Test04Or03()
+        {
+            int[] expdnrs = { 0, 3 }; NormalTest1("OR (word1, word2)", expdnrs);
+        }
+        [Test]
+        public void Test05Not01()
+        {
+            int[] expdnrs = { 3 }; NormalTest1("word2 NOT word1", expdnrs);
+        }
+        [Test]
+        public void Test05Not02()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("word2* not ord*", expdnrs);
+        }
+        [Test]
+        public void Test06AndOr01()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("(word1 or ab)and or(word2,xyz, defg)", expdnrs);
+        }
+        [Test]
+        public void Test07AndOrNot02()
+        {
+            int[] expdnrs = { 0 }; NormalTest1("or( word2* not ord*, and(xyz,def))", expdnrs);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/1e7576a6/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs b/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
new file mode 100644
index 0000000..6a19cb7
--- /dev/null
+++ b/Lucene.Net.Tests.QueryParser/Surround/Query/Test03Distance.cs
@@ -0,0 +1,341 @@
+\ufeffusing Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Tests of surround distance operators — ordered (W / nW) and unordered
+    /// (N / nN), infix and prefix forms — against three small single-field
+    /// in-memory indexes. Each expdnrs array lists the document numbers
+    /// expected to match the query.
+    /// </summary>
+    [TestFixture]
+    public class Test03Distance : LuceneTestCase
+    {
+        //public static void Main(string[] args) {
+        //    TestRunner.run(new TestSuite(Test03Distance.class));
+        //}
+
+        private bool verbose = false;     // set true to print parse/search diagnostics
+        private int maxBasicQueries = 16; // cap for BasicQueryFactory query expansion
+
+        // Queries that are structurally invalid under a distance operator and
+        // must therefore fail to parse.
+        private string[] exceptionQueries = {
+            "(aa and bb) w cc",
+            "(aa or bb) w (cc and dd)",
+            "(aa opt bb) w cc",
+            "(aa not bb) w cc",
+            "(aa or bb) w (bi:cc)",
+            "(aa or bb) w bi:cc",
+            "(aa or bi:bb) w cc",
+            "(aa or (bi:bb)) w cc",
+            "(aa or (bb and dd)) w cc"
+        };
+
+        [Test]
+        public void Test00Exceptions()
+        {
+            // GetFailQueries returns a non-empty report listing any query that
+            // unexpectedly parsed without throwing.
+            string m = ExceptionQueryTst.GetFailQueries(exceptionQueries, verbose);
+            if (m.Length > 0)
+            {
+                fail("No ParseException for:\n" + m);
+            }
+        }
+
+        private readonly string fieldName = "bi";
+
+        // Corpus for DistanceTest1; document number = array index.
+        private string[] docs1 = {
+            "word1 word2 word3",
+            "word4 word5",
+            "ord1 ord2 ord3",
+            "orda1 orda2 orda3 word2 worda3",
+            "a c e a b c"
+        };
+
+        SingleFieldTestDb db1;
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            // Each fixture gets fresh indexes so tests stay independent.
+            db1 = new SingleFieldTestDb(Random(), docs1, fieldName);
+            db2 = new SingleFieldTestDb(Random(), docs2, fieldName);
+            db3 = new SingleFieldTestDb(Random(), docs3, fieldName);
+        }
+
+        /// <summary>
+        /// Parses and runs <paramref name="query"/> against <paramref name="db"/>
+        /// and asserts it matches exactly the documents in <paramref name="expdnrs"/>.
+        /// </summary>
+        private void DistanceTst(String query, int[] expdnrs, SingleFieldTestDb db)
+        {
+            BooleanQueryTst bqt = new BooleanQueryTst(query, expdnrs, db, fieldName, this,
+                                                        new BasicQueryFactory(maxBasicQueries));
+            bqt.Verbose = (verbose);
+            bqt.DoTest();
+        }
+
+        public void DistanceTest1(string query, int[] expdnrs)
+        {
+            DistanceTst(query, expdnrs, db1);
+        }
+
+        [Test]
+        public void Test0W01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1 w word2", expdnrs);
+        }
+        [Test]
+        public void Test0N01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1 n word2", expdnrs);
+        }
+        [Test]
+        public void Test0N01r()
+        { /* r reverse */
+            int[] expdnrs = { 0 }; DistanceTest1("word2 n word1", expdnrs);
+        }
+        [Test]
+        public void Test0W02()
+        {
+            int[] expdnrs = { }; DistanceTest1("word2 w word1", expdnrs);
+        }
+        [Test]
+        public void Test0W03()
+        {
+            int[] expdnrs = { }; DistanceTest1("word2 2W word1", expdnrs);
+        }
+        [Test]
+        public void Test0N03()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word2 2N word1", expdnrs);
+        }
+        [Test]
+        public void Test0N03r()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1 2N word2", expdnrs);
+        }
+
+        [Test]
+        public void Test0W04()
+        {
+            int[] expdnrs = { }; DistanceTest1("word2 3w word1", expdnrs);
+        }
+
+        [Test]
+        public void Test0N04()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word2 3n word1", expdnrs);
+        }
+        [Test]
+        public void Test0N04r()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1 3n word2", expdnrs);
+        }
+
+        [Test]
+        public void Test0W05()
+        {
+            int[] expdnrs = { }; DistanceTest1("orda1 w orda3", expdnrs);
+        }
+        [Test]
+        public void Test0W06()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("orda1 2w orda3", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1* w word2", expdnrs);
+        }
+        [Test]
+        public void Test1Wtrunc02()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word* w word2", expdnrs);
+        }
+        [Test]
+        public void Test1Wtrunc02r()
+        {
+            int[] expdnrs = { 0, 3 }; DistanceTest1("word2 w word*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc02()
+        {
+            int[] expdnrs = { 0, 3 }; DistanceTest1("word* n word2", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc02r()
+        {
+            int[] expdnrs = { 0, 3 }; DistanceTest1("word2 n word*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc03()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1* w word2*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc03()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word1* N word2*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc04()
+        {
+            int[] expdnrs = { }; DistanceTest1("kxork* w kxor*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc04()
+        {
+            int[] expdnrs = { }; DistanceTest1("kxork* 99n kxor*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc05()
+        {
+            int[] expdnrs = { }; DistanceTest1("word2* 2W word1*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc05()
+        {
+            int[] expdnrs = { 0 }; DistanceTest1("word2* 2N word1*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc06()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("ord* W word*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc06()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("ord* N word*", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc06r()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("word* N ord*", expdnrs);
+        }
+
+        [Test]
+        public void Test1Wtrunc07()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) W word*", expdnrs);
+        }
+        [Test]
+        public void Test1Wtrunc08()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) W (word2 OR worda3)", expdnrs);
+        }
+        [Test]
+        public void Test1Wtrunc09()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) 2W (word2 OR worda3)", expdnrs);
+        }
+        [Test]
+        public void Test1Ntrunc09()
+        {
+            int[] expdnrs = { 3 }; DistanceTest1("(orda2 OR orda3) 2N (word2 OR worda3)", expdnrs);
+        }
+
+        // Corpus for DistanceTest2 (prefix/nested distance operators).
+        string[] docs2 = {
+            "w1 w2 w3 w4 w5",
+            "w1 w3 w2 w3",
+            ""
+        };
+
+        SingleFieldTestDb db2;
+
+        public void DistanceTest2(string query, int[] expdnrs)
+        {
+            DistanceTst(query, expdnrs, db2);
+        }
+
+        [Test]
+        public void Test2Wprefix01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest2("W (w1, w2, w3)", expdnrs);
+        }
+        [Test]
+        public void Test2Nprefix01a()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("N(w1, w2, w3)", expdnrs);
+        }
+        [Test]
+        public void Test2Nprefix01b()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("N(w3, w1, w2)", expdnrs);
+        }
+
+        [Test]
+        public void Test2Wprefix02()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("2W(w1,w2,w3)", expdnrs);
+        }
+
+        [Test]
+        public void Test2Nprefix02a()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("2N(w1,w2,w3)", expdnrs);
+        }
+        [Test]
+        public void Test2Nprefix02b()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("2N(w2,w3,w1)", expdnrs);
+        }
+
+        [Test]
+        public void Test2Wnested01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest2("w1 W w2 W w3", expdnrs);
+        }
+        [Test]
+        public void Test2Nnested01()
+        {
+            int[] expdnrs = { 0 }; DistanceTest2("w1 N w2 N w3", expdnrs);
+        }
+
+        [Test]
+        public void Test2Wnested02()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("w1 2W w2 2W w3", expdnrs);
+        }
+        [Test]
+        public void Test2Nnested02()
+        {
+            int[] expdnrs = { 0, 1 }; DistanceTest2("w1 2N w2 2N w3", expdnrs);
+        }
+
+        // Corpus for DistanceTest3 (a larger, realistic nested-distance example).
+        string[] docs3 = {
+            "low pressure temperature inversion and rain",
+            "when the temperature has a negative height above a depression no precipitation gradient is expected",
+            "when the temperature has a negative height gradient above a depression no precipitation is expected",
+            ""
+        };
+
+        SingleFieldTestDb db3;
+
+        public void DistanceTest3(string query, int[] expdnrs)
+        {
+            DistanceTst(query, expdnrs, db3);
+        }
+
+        [Test]
+        public void Test3Example01()
+        {
+            int[] expdnrs = { 0, 2 }; // query does not match doc 1 because "gradient" is in wrong place there.
+            DistanceTest3("50n((low w pressure*) or depression*,"
+                           + "5n(temperat*, (invers* or (negativ* 3n gradient*))),"
+                           + "rain* or precipitat*)",
+                           expdnrs);
+        }
+    }
+}


[08/50] [abbrv] lucenenet git commit: Fixed long/ulong casting issues in QueryParserTokenManager.

Posted by sy...@apache.org.
Fixed long/ulong casting issues in QueryParserTokenManager.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/4e04b59d
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/4e04b59d
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/4e04b59d

Branch: refs/heads/master
Commit: 4e04b59d242aad7638cce979355c6f386a6a3bfa
Parents: de9d52b
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 20:12:20 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:06 2016 +0700

----------------------------------------------------------------------
 .../Classic/QueryParserTokenManager.cs          | 34 ++++++++++----------
 1 file changed, 17 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/4e04b59d/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs b/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
index e92bcb8..aac1505 100644
--- a/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
+++ b/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
@@ -134,16 +134,16 @@ namespace Lucene.Net.QueryParser.Classic
 						{
 							
 							case 49: 
-							case 33: 
-								if ((0xfbff7cf8ffffd9ffL & l) == (ulong) 0L)
+							case 33:
+                                if ((0xfbff7cf8ffffd9ffL & l) == (ulong)0L)
 									break;
 								if (kind > 23)
 									kind = 23;
 								JjCheckNAddTwoStates(33, 34);
 								break;
 							
-							case 0: 
-								if ((0xfbff54f8ffffd9ffL & l) != (ulong) 0L)
+							case 0:
+                                if ((0xfbff54f8ffffd9ffL & l) != (ulong)0L)
 								{
 									if (kind > 23)
 										kind = 23;
@@ -276,7 +276,7 @@ namespace Lucene.Net.QueryParser.Classic
 								break;
 							
 							case 32:
-                                if ((0xfbff54f8ffffd9ffL & l) == 0L)
+                                if ((0xfbff54f8ffffd9ffL & l) == (ulong)0L)
                                     break;
                                 if (kind > 23)
                                     kind = 23;
@@ -293,7 +293,7 @@ namespace Lucene.Net.QueryParser.Classic
                                     JjCheckNAddStates(0, 2);
                                 break;
                             case 37:
-                                if ((0xffff7fffffffffffL & l) != 0L)
+                                if ((0xffff7fffffffffffL & l) != (ulong)0L)
                                     JjCheckNAddStates(0, 2);
                                 break;
                             case 40:
@@ -496,14 +496,14 @@ namespace Lucene.Net.QueryParser.Classic
                                 JjCheckNAddTwoStates(28, 29);
                                 break;
                             case 32:
-                                if ((0x97ffffff87ffffffL & l) == 0L)
+                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
                                     break;
                                 if (kind > 23)
                                     kind = 23;
                                 JjCheckNAddTwoStates(33, 34);
                                 break;
                             case 33:
-                                if ((0x97ffffff87ffffffL & l) == 0L)
+                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
                                     break;
                                 if (kind > 23)
                                     kind = 23;
@@ -526,14 +526,14 @@ namespace Lucene.Net.QueryParser.Classic
                                     jjstateSet[jjnewStateCnt++] = 38;
                                 break;
                             case 41:
-                                if ((0x97ffffff87ffffffL & l) == 0L)
+                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
                                     break;
                                 if (kind > 20)
                                     kind = 20;
                                 JjCheckNAddStates(6, 10);
                                 break;
                             case 42:
-                                if ((0x97ffffff87ffffffL & l) == 0L)
+                                if ((0x97ffffff87ffffffL & l) == (ulong)0L)
                                     break;
                                 if (kind > 20)
                                     kind = 20;
@@ -549,7 +549,7 @@ namespace Lucene.Net.QueryParser.Classic
                                 JjCheckNAddTwoStates(42, 43);
                                 break;
                             case 45:
-                                if ((0x97ffffff87ffffffL & l) != 0L)
+                                if ((0x97ffffff87ffffffL & l) != (ulong)0L)
                                     JjCheckNAddStates(18, 20);
                                 break;
                             case 46:
@@ -927,8 +927,8 @@ namespace Lucene.Net.QueryParser.Classic
 						switch (jjstateSet[--i])
 						{
 							
-							case 0: 
-								if ((0xfffffffeffffffffL & l) != (ulong) 0L)
+							case 0:
+                                if ((0xfffffffeffffffffL & l) != (ulong)0L)
 								{
 									if (kind > 32)
 										kind = 32;
@@ -948,8 +948,8 @@ namespace Lucene.Net.QueryParser.Classic
 									JjCheckNAddTwoStates(2, 4);
 								break;
 							
-							case 2: 
-								if ((0xfffffffbffffffffL & l) != (ulong) 0L)
+							case 2:
+                                if ((0xfffffffbffffffffL & l) != (ulong)0L)
 									JjCheckNAddStates(33, 35);
 								break;
 							
@@ -963,8 +963,8 @@ namespace Lucene.Net.QueryParser.Classic
 									kind = 31;
 								break;
 							
-							case 6: 
-								if ((0xfffffffeffffffffL & l) == (ulong) 0L)
+							case 6:
+                                if ((0xfffffffeffffffffL & l) == (ulong)0L)
 									break;
 								if (kind > 32)
 									kind = 32;


[50/50] [abbrv] lucenenet git commit: Merge remote-tracking branch '888/queryparser'

Posted by sy...@apache.org.
Merge remote-tracking branch '888/queryparser'


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/1946bf82
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/1946bf82
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/1946bf82

Branch: refs/heads/master
Commit: 1946bf82cf7f9202f873617eebfed0913147faaf
Parents: 36dc55f ab5c059
Author: Itamar Syn-Hershko <it...@code972.com>
Authored: Mon Sep 12 00:30:11 2016 +0300
Committer: Itamar Syn-Hershko <it...@code972.com>
Committed: Mon Sep 12 00:30:11 2016 +0300

----------------------------------------------------------------------
 Lucene.Net.sln                                  |   24 +
 src/Lucene.Net.Core/Util/ToStringUtils.cs       |    6 +-
 .../Analyzing/AnalyzingQueryParser.cs           |  198 +++
 .../Classic/CharStream.cs                       |  134 ++
 .../Classic/FastCharStream.cs                   |  158 ++
 .../Classic/MultiFieldQueryParser.cs            |  403 +++++
 .../Classic/ParseException.cs                   |  234 +++
 .../Classic/QueryParser.cs                      |  923 +++++++++++
 .../Classic/QueryParserBase.cs                  | 1024 ++++++++++++
 .../Classic/QueryParserConstants.cs             |  222 +++
 .../Classic/QueryParserTokenManager.cs          | 1356 ++++++++++++++++
 src/Lucene.Net.QueryParser/Classic/Token.cs     |  142 ++
 .../Classic/TokenMgrError.cs                    |  170 ++
 .../ComplexPhrase/ComplexPhraseQueryParser.cs   |  468 ++++++
 .../Ext/ExtendableQueryParser.cs                |  131 ++
 .../Ext/ExtensionQuery.cs                       |   54 +
 src/Lucene.Net.QueryParser/Ext/Extensions.cs    |  167 ++
 .../Ext/ParserExtension.cs                      |   50 +
 .../Standard/CommonQueryParserConfiguration.cs  |  106 ++
 .../Lucene.Net.QueryParser.csproj               |  107 ++
 .../Properties/AssemblyInfo.cs                  |   39 +
 .../Simple/SimpleQueryParser.cs                 |  798 +++++++++
 .../Surround/Parser/CharStream.cs               |  134 ++
 .../Surround/Parser/FastCharStream.cs           |  158 ++
 .../Surround/Parser/ParseException.cs           |  234 +++
 .../Surround/Parser/QueryParser.cs              |  910 +++++++++++
 .../Surround/Parser/QueryParserConstants.cs     |  118 ++
 .../Surround/Parser/QueryParserTokenManager.cs  |  759 +++++++++
 .../Surround/Parser/Token.cs                    |  142 ++
 .../Surround/Parser/TokenMgrError.cs            |  170 ++
 .../Surround/Query/AndQuery.cs                  |   39 +
 .../Surround/Query/BasicQueryFactory.cs         |  110 ++
 .../Surround/Query/ComposedQuery.cs             |  144 ++
 .../Surround/Query/DistanceQuery.cs             |  117 ++
 .../Surround/Query/DistanceRewriteQuery.cs      |   35 +
 .../Surround/Query/DistanceSubQuery.cs          |   36 +
 .../Surround/Query/FieldsQuery.cs               |  105 ++
 .../Surround/Query/NotQuery.cs                  |   48 +
 .../Surround/Query/OrQuery.cs                   |   71 +
 .../Surround/Query/RewriteQuery.cs              |   85 +
 .../Surround/Query/SimpleTerm.cs                |  118 ++
 .../Surround/Query/SimpleTermRewriteQuery.cs    |   64 +
 .../Surround/Query/SpanNearClauseFactory.cs     |  124 ++
 .../Surround/Query/SrndBooleanQuery.cs          |   51 +
 .../Surround/Query/SrndPrefixQuery.cs           |  108 ++
 .../Surround/Query/SrndQuery.cs                 |  148 ++
 .../Surround/Query/SrndTermQuery.cs             |   63 +
 .../Surround/Query/SrndTruncQuery.cs            |  139 ++
 .../Surround/Query/TooManyBasicQueries.cs       |   30 +
 .../Analyzing/TestAnalyzingQueryParser.cs       |  343 ++++
 .../Classic/TestMultiAnalyzer.cs                |  278 ++++
 .../Classic/TestMultiFieldQueryParser.cs        |  376 +++++
 .../Classic/TestMultiPhraseQueryParsing.cs      |  121 ++
 .../Classic/TestQueryParser.cs                  |  564 +++++++
 .../ComplexPhrase/TestComplexPhraseQuery.cs     |  214 +++
 .../Ext/ExtensionStub.cs                        |   30 +
 .../Ext/TestExtendableQueryParser.cs            |  145 ++
 .../Ext/TestExtensions.cs                       |   97 ++
 .../Lucene.Net.Tests.QueryParser.csproj         |   94 ++
 .../Properties/AssemblyInfo.cs                  |   36 +
 .../Simple/TestSimpleQueryParser.cs             |  728 +++++++++
 .../Surround/Query/BooleanQueryTst.cs           |  142 ++
 .../Surround/Query/ExceptionQueryTst.cs         |   76 +
 .../Surround/Query/SingleFieldTestDb.cs         |   55 +
 .../Surround/Query/SrndQueryTest.cs             |   48 +
 .../Surround/Query/Test01Exceptions.cs          |   72 +
 .../Surround/Query/Test02Boolean.cs             |  178 ++
 .../Surround/Query/Test03Distance.cs            |  341 ++++
 .../Util/QueryParserTestBase.cs                 | 1523 ++++++++++++++++++
 .../packages.config                             |    4 +
 src/Lucene.Net.Tests/Lucene.Net.Tests.csproj    |    1 +
 .../core/Support/TestToStringUtils.cs           |   55 +
 72 files changed, 16692 insertions(+), 3 deletions(-)
----------------------------------------------------------------------



[07/50] [abbrv] lucenenet git commit: Fixed test name casing.

Posted by sy...@apache.org.
Fixed test name casing.


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/de9d52b3
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/de9d52b3
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/de9d52b3

Branch: refs/heads/master
Commit: de9d52b30e8768f0cf56e31958aa98bde5137397
Parents: cfacfbf
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Sun Jul 31 19:20:57 2016 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Fri Sep 2 22:30:01 2016 +0700

----------------------------------------------------------------------
 Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/de9d52b3/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
----------------------------------------------------------------------
diff --git a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
index 282b355..bcf8792 100644
--- a/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
+++ b/Lucene.Net.Tests.QueryParser/Util/QueryParserTestBase.cs
@@ -370,7 +370,7 @@ namespace Lucene.Net.QueryParser.Util
         }
 
         [Test]
-        public void testCJKSloppyPhrase()
+        public void TestCJKSloppyPhrase()
         {
             // individual CJK chars as terms
             SimpleCJKAnalyzer analyzer = new SimpleCJKAnalyzer();