You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by sy...@apache.org on 2016/09/11 21:30:54 UTC
[23/50] [abbrv] lucenenet git commit: Moved Lucene.Net.QueryParser
and Lucene.Net.Tests.QueryParser projects into src\ directory.
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
new file mode 100644
index 0000000..49ef7d4
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -0,0 +1,912 @@
+\ufeffusing Lucene.Net.QueryParser.Surround.Query;
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+    /// <summary>
+    /// This class is generated by JavaCC. The only method that clients should need
+    /// to call is <see cref="Parse(string)"/>.
+    ///
+    /// <p>This parser generates queries that make use of position information
+    /// (Span queries). It provides positional operators (<code>w</code> and
+    /// <code>n</code>) that accept a numeric distance, as well as boolean
+    /// operators (<code>and</code>, <code>or</code>, and <code>not</code>),
+    /// wildcards (<code>*</code> and <code>?</code>), quoting (with
+    /// <code>"</code>), and boosting (via <code>^</code>).</p>
+    ///
+    /// <p>The operators (W, N, AND, OR, NOT) can be expressed lower-cased or
+    /// upper-cased, and the non-unary operators (everything but NOT) support
+    /// both infix <code>(a AND b AND c)</code> and prefix <code>AND(a, b,
+    /// c)</code> notation.</p>
+    ///
+    /// <p>The W and N operators express a positional relationship among their
+    /// operands. W is ordered, and N is unordered. The distance is 1 by
+    /// default, meaning the operands are adjacent, or may be provided as a
+    /// prefix from 2-99. So, for example, 3W(a, b) means that terms a and b
+    /// must appear within three positions of each other, or in other words, up
+    /// to two terms may appear between a and b. </p>
+    /// </summary>
+ public class QueryParser
+ {
+        // Limits used when validating prefix and truncated terms; see
+        // AllowedSuffix and AllowedTruncation below.
+        internal readonly int minimumPrefixLength = 3;
+        internal readonly int minimumCharsInTrunc = 3;
+        internal readonly string truncationErrorMessage = "Too unrestrictive truncation: ";
+        internal readonly string boostErrorMessage = "Cannot handle boost value: ";
+
+        /* CHECKME: These should be the same as for the tokenizer. How? */
+        internal readonly char truncator = '*';
+        internal readonly char anyChar = '?';
+        internal readonly char quote = '"';
+        internal readonly char fieldOperator = ':';
+        internal readonly char comma = ','; /* prefix list separator */
+        internal readonly char carat = '^'; /* weight operator */
+
+        /// <summary>
+        /// Parses a surround query expression into a <see cref="SrndQuery"/>
+        /// using a fresh parser instance.
+        /// </summary>
+        public static SrndQuery Parse(string query)
+        {
+            return new QueryParser().Parse2(query);
+        }
+
+        /// <summary>
+        /// Creates a parser primed with an empty input; <see cref="Parse2(string)"/>
+        /// reinitializes the token source before each parse.
+        /// </summary>
+        public QueryParser()
+            : this(new FastCharStream(new StringReader("")))
+        {
+        }
+
+        /// <summary>
+        /// Parses the given surround query text into a <see cref="SrndQuery"/>.
+        /// Lexical (token manager) errors are rethrown as parse errors.
+        /// </summary>
+        /// <exception cref="ParseException">on a lexical or syntax error</exception>
+        public virtual SrndQuery Parse2(string query)
+        {
+            ReInit(new FastCharStream(new StringReader(query)));
+            try
+            {
+                return TopSrndQuery();
+            }
+            catch (TokenMgrError tme)
+            {
+                // NOTE(review): only the message survives; the TokenMgrError is
+                // not attached as an inner exception — confirm this is intended.
+                throw new ParseException(tme.Message);
+            }
+        }
+
+        /// <summary>Wraps <paramref name="q"/> so it is searched against the given field names.</summary>
+        protected virtual SrndQuery GetFieldsQuery(
+            SrndQuery q, IEnumerable<string> fieldNames)
+        {
+            /* FIXME: check acceptable subquery: at least one subquery should not be
+             * a fields query.
+             */
+            return new FieldsQuery(q, fieldNames, fieldOperator);
+        }
+
+        /// <summary>Builds an OR query; <paramref name="infix"/> records the notation used.</summary>
+        protected virtual SrndQuery GetOrQuery(IEnumerable<SrndQuery> queries, bool infix, Token orToken)
+        {
+            return new OrQuery(queries, infix, orToken.image);
+        }
+
+        /// <summary>Builds an AND query over the operands.</summary>
+        protected virtual SrndQuery GetAndQuery(IEnumerable<SrndQuery> queries, bool infix, Token andToken)
+        {
+            return new AndQuery(queries, infix, andToken.image);
+        }
+
+        /// <summary>Builds a NOT query (always infix in this grammar).</summary>
+        protected virtual SrndQuery GetNotQuery(IEnumerable<SrndQuery> queries, Token notToken)
+        {
+            return new NotQuery(queries, notToken.image);
+        }
+
+        /// <summary>
+        /// Extracts the numeric distance from a distance-operator image:
+        /// "W"/"N" -> 1, "2W" -> 2, "99N" -> 99, etc.
+        /// </summary>
+        protected static int GetOpDistance(string distanceOp)
+        {
+            /* W, 2W, 3W etc -> 1, 2 3, etc. Same for N, 2N ... */
+            return distanceOp.Length == 1
+                ? 1
+                // Parse with the invariant culture: the digits come from the
+                // tokenizer, not from locale-formatted user text (matches the
+                // culture-invariant Integer.parseInt of the Java original).
+                : int.Parse(distanceOp.Substring(0, distanceOp.Length - 1), CultureInfo.InvariantCulture);
+        }
+
+        /// <summary>
+        /// Throws a ParseException when the distance query reports that one of
+        /// its subqueries is not allowed (the message comes from the query itself).
+        /// </summary>
+        protected static void CheckDistanceSubQueries(DistanceQuery distq, string opName)
+        {
+            string m = distq.DistanceSubQueryNotAllowed();
+            if (m != null)
+            {
+                throw new ParseException("Operator " + opName + ": " + m);
+            }
+        }
+
+        /// <summary>
+        /// Builds a distance (W/N) query after validating that every operand is
+        /// an allowed distance subquery.
+        /// </summary>
+        /// <param name="ordered">true for W (ordered), false for N (unordered)</param>
+        protected virtual SrndQuery GetDistanceQuery(
+            IEnumerable<SrndQuery> queries,
+            bool infix,
+            Token dToken,
+            bool ordered)
+        {
+            DistanceQuery dq = new DistanceQuery(queries,
+                                                infix,
+                                                GetOpDistance(dToken.image),
+                                                dToken.image,
+                                                ordered);
+            CheckDistanceSubQueries(dq, dToken.image);
+            return dq;
+        }
+
+        /// <summary>Builds a single-term query; <paramref name="quoted"/> records whether the term was quoted.</summary>
+        protected virtual SrndQuery GetTermQuery(
+            String term, bool quoted)
+        {
+            return new SrndTermQuery(term, quoted);
+        }
+
+        /// <summary>
+        /// A suffix (prefix-wildcard) term is accepted when the text before the
+        /// trailing truncator has at least <see cref="minimumPrefixLength"/> characters.
+        /// </summary>
+        protected virtual bool AllowedSuffix(String suffixed)
+        {
+            int prefixLength = suffixed.Length - 1; /* exclude the trailing truncator */
+            return prefixLength >= minimumPrefixLength;
+        }
+
+        /// <summary>Builds a prefix query for a term ending in the truncator character.</summary>
+        protected virtual SrndQuery GetPrefixQuery(
+            string prefix, bool quoted)
+        {
+            return new SrndPrefixQuery(prefix, quoted, truncator);
+        }
+
+        /// <summary>
+        /// A truncated term (containing * or ?) is accepted only when it has at
+        /// least <see cref="minimumCharsInTrunc"/> non-wildcard characters.
+        /// </summary>
+        protected virtual bool AllowedTruncation(string truncated)
+        {
+            /* At least 3 normal characters needed. */
+            int normalCount = 0;
+            foreach (char c in truncated)
+            {
+                if (c != truncator && c != anyChar)
+                {
+                    normalCount++;
+                }
+            }
+            return normalCount >= minimumCharsInTrunc;
+        }
+
+        /// <summary>Builds a truncation query for a term containing wildcard characters.</summary>
+        protected virtual SrndQuery GetTruncQuery(string truncated)
+        {
+            return new SrndTruncQuery(truncated, truncator, anyChar);
+        }
+
+        /// <summary>Grammar entry point: FieldsQuery followed by EOF (token kind 0).</summary>
+        public SrndQuery TopSrndQuery()
+        {
+            SrndQuery q;
+            q = FieldsQuery();
+            Jj_consume_token(0);
+            // JavaCC wraps returns in "if (true)" and appends an unreachable
+            // throw so every generated path appears to return a value.
+            { if (true) return q; }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: OptionalFields OrQuery; wraps the query in a fields query
+        /// when one or more "field:" prefixes were present.
+        /// </summary>
+        public SrndQuery FieldsQuery()
+        {
+            SrndQuery q;
+            IEnumerable<string> fieldNames;
+            fieldNames = OptionalFields();
+            q = OrQuery();
+            { if (true) return (fieldNames == null) ? q : GetFieldsQuery(q, fieldNames); }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: ( TERM COLON )* — collects leading "field:" prefixes.
+        /// Returns null when no field prefix is present.
+        /// </summary>
+        public IEnumerable<string> OptionalFields()
+        {
+            Token fieldName;
+            IList<string> fieldNames = null;
+
+            while (true)
+            {
+                // Two-token lookahead: only commit when TERM COLON follows.
+                if (Jj_2_1(2))
+                {
+                    ;
+                }
+                else
+                {
+                    goto label_1;
+                }
+                // to the colon
+                fieldName = Jj_consume_token(RegexpToken.TERM);
+                Jj_consume_token(RegexpToken.COLON);
+                if (fieldNames == null)
+                {
+                    fieldNames = new List<string>();
+                }
+                fieldNames.Add(fieldName.image);
+            }
+            label_1:
+            { if (true) return fieldNames; }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: AndQuery ( OR AndQuery )* — only the last OR token's image
+        /// is kept for the resulting query.
+        /// </summary>
+        public SrndQuery OrQuery()
+        {
+            SrndQuery q;
+            IList<SrndQuery> queries = null;
+            Token oprt = null;
+            q = AndQuery();
+
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.OR:
+                        ;
+                        break;
+                    default:
+                        jj_la1[0] = jj_gen;
+                        goto label_2;
+                }
+                oprt = Jj_consume_token(RegexpToken.OR);
+                /* keep only last used operator */
+                if (queries == null)
+                {
+                    queries = new List<SrndQuery>();
+                    queries.Add(q);
+                }
+                q = AndQuery();
+                queries.Add(q);
+            }
+            label_2:
+            { if (true) return (queries == null) ? q : GetOrQuery(queries, true /* infix */, oprt); }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: NotQuery ( AND NotQuery )* — only the last AND token's
+        /// image is kept for the resulting query.
+        /// </summary>
+        public SrndQuery AndQuery()
+        {
+            SrndQuery q;
+            IList<SrndQuery> queries = null;
+            Token oprt = null;
+            q = NotQuery();
+
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.AND:
+                        ;
+                        break;
+                    default:
+                        jj_la1[1] = jj_gen;
+                        goto label_3;
+                }
+                oprt = Jj_consume_token(RegexpToken.AND);
+                /* keep only last used operator */
+                if (queries == null)
+                {
+                    queries = new List<SrndQuery>();
+                    queries.Add(q);
+                }
+                q = NotQuery();
+                queries.Add(q);
+            }
+            label_3:
+            { if (true) return (queries == null) ? q : GetAndQuery(queries, true /* infix */, oprt); }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: NQuery ( NOT NQuery )* — only the last NOT token's image
+        /// is kept for the resulting query.
+        /// </summary>
+        public SrndQuery NotQuery()
+        {
+            SrndQuery q;
+            IList<SrndQuery> queries = null;
+            Token oprt = null;
+            q = NQuery();
+
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.NOT:
+                        ;
+                        break;
+                    default:
+                        jj_la1[2] = jj_gen;
+                        goto label_4;
+                }
+                oprt = Jj_consume_token(RegexpToken.NOT);
+                /* keep only last used operator */
+                if (queries == null)
+                {
+                    queries = new List<SrndQuery>();
+                    queries.Add(q);
+                }
+                q = NQuery();
+                queries.Add(q);
+            }
+            label_4:
+            { if (true) return (queries == null) ? q : GetNotQuery(queries, oprt); }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: WQuery ( N WQuery )* — each N pair becomes an unordered
+        /// distance query; left associative.
+        /// </summary>
+        public SrndQuery NQuery()
+        {
+            SrndQuery q;
+            IList<SrndQuery> queries;
+            Token dt;
+            q = WQuery();
+
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.N:
+                        ;
+                        break;
+                    default:
+                        jj_la1[3] = jj_gen;
+                        goto label_5;
+                }
+                dt = Jj_consume_token(RegexpToken.N);
+                queries = new List<SrndQuery>();
+                queries.Add(q); /* left associative */
+
+                q = WQuery();
+                queries.Add(q);
+                q = GetDistanceQuery(queries, true /* infix */, dt, false /* not ordered */);
+            }
+            label_5:
+            { if (true) return q; }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: PrimaryQuery ( W PrimaryQuery )* — each W pair becomes an
+        /// ordered distance query; left associative.
+        /// </summary>
+        public SrndQuery WQuery()
+        {
+            SrndQuery q;
+            IList<SrndQuery> queries;
+            Token wt;
+            q = PrimaryQuery();
+
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.W:
+                        ;
+                        break;
+                    default:
+                        jj_la1[4] = jj_gen;
+                        goto label_6;
+                }
+                wt = Jj_consume_token(RegexpToken.W);
+                queries = new List<SrndQuery>();
+                queries.Add(q); /* left associative */
+
+                q = PrimaryQuery();
+                queries.Add(q);
+                q = GetDistanceQuery(queries, true /* infix */, wt, true /* ordered */);
+            }
+            label_6:
+            { if (true) return q; }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: "(" FieldsQuery ")" | PrefixOperatorQuery | SimpleTerm,
+        /// each followed by optional "^" weights.
+        /// </summary>
+        public SrndQuery PrimaryQuery()
+        {
+            /* bracketed weighted query or weighted term */
+            SrndQuery q;
+            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+            {
+                case RegexpToken.LPAREN:
+                    Jj_consume_token(RegexpToken.LPAREN);
+                    q = FieldsQuery();
+                    Jj_consume_token(RegexpToken.RPAREN);
+                    break;
+                case RegexpToken.OR:
+                case RegexpToken.AND:
+                case RegexpToken.W:
+                case RegexpToken.N:
+                    q = PrefixOperatorQuery();
+                    break;
+                case RegexpToken.TRUNCQUOTED:
+                case RegexpToken.QUOTED:
+                case RegexpToken.SUFFIXTERM:
+                case RegexpToken.TRUNCTERM:
+                case RegexpToken.TERM:
+                    q = SimpleTerm();
+                    break;
+                default:
+                    jj_la1[5] = jj_gen;
+                    // Consuming kind -1 registers the failure inside
+                    // Jj_consume_token; the explicit throw satisfies the compiler.
+                    Jj_consume_token(-1);
+                    throw new ParseException();
+            }
+            OptionalWeights(q);
+            { if (true) return q; }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: ( OR | AND | N | W ) "(" FieldsQuery ( "," FieldsQuery )+ ")"
+        /// — prefix notation for the non-unary operators.
+        /// </summary>
+        public SrndQuery PrefixOperatorQuery()
+        {
+            Token oprt;
+            IEnumerable<SrndQuery> queries;
+            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+            {
+                case RegexpToken.OR:
+                    oprt = Jj_consume_token(RegexpToken.OR);
+                    /* prefix OR */
+                    queries = FieldsQueryList();
+                    { if (true) return GetOrQuery(queries, false /* not infix */, oprt); }
+                    break;
+                case RegexpToken.AND:
+                    oprt = Jj_consume_token(RegexpToken.AND);
+                    /* prefix AND */
+                    queries = FieldsQueryList();
+                    { if (true) return GetAndQuery(queries, false /* not infix */, oprt); }
+                    break;
+                case RegexpToken.N:
+                    oprt = Jj_consume_token(RegexpToken.N);
+                    /* prefix N */
+                    queries = FieldsQueryList();
+                    { if (true) return GetDistanceQuery(queries, false /* not infix */, oprt, false /* not ordered */); }
+                    break;
+                case RegexpToken.W:
+                    oprt = Jj_consume_token(RegexpToken.W);
+                    /* prefix W */
+                    queries = FieldsQueryList();
+                    { if (true) return GetDistanceQuery(queries, false /* not infix */, oprt, true /* ordered */); }
+                    break;
+                default:
+                    jj_la1[6] = jj_gen;
+                    Jj_consume_token(-1);
+                    throw new ParseException();
+            }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: "(" FieldsQuery ( "," FieldsQuery )+ ")" — operand list for
+        /// prefix notation; at least two operands are required.
+        /// </summary>
+        public IEnumerable<SrndQuery> FieldsQueryList()
+        {
+            SrndQuery q;
+            IList<SrndQuery> queries = new List<SrndQuery>();
+            Jj_consume_token(RegexpToken.LPAREN);
+            q = FieldsQuery();
+            queries.Add(q);
+
+            while (true)
+            {
+                Jj_consume_token(RegexpToken.COMMA);
+                q = FieldsQuery();
+                queries.Add(q);
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.COMMA:
+                        ;
+                        break;
+                    default:
+                        jj_la1[7] = jj_gen;
+                        goto label_7;
+                }
+            }
+            label_7:
+            Jj_consume_token(RegexpToken.RPAREN);
+            { if (true) return queries; }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: TERM | QUOTED | SUFFIXTERM | TRUNCTERM | TRUNCQUOTED —
+        /// builds the matching term, prefix, or truncation query.
+        /// </summary>
+        public SrndQuery SimpleTerm()
+        {
+            Token term;
+            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+            {
+                case RegexpToken.TERM:
+                    term = Jj_consume_token(RegexpToken.TERM);
+                    { if (true) return GetTermQuery(term.image, false /* not quoted */); }
+                    break;
+                case RegexpToken.QUOTED:
+                    term = Jj_consume_token(RegexpToken.QUOTED);
+                    // Substring(1, len - 2) strips the surrounding quotes
+                    // (equivalent to Java's substring(1, len - 1)).
+                    { if (true) return GetTermQuery(term.image.Substring(1, (term.image.Length - 1) - 1), true /* quoted */); }
+                    break;
+                case RegexpToken.SUFFIXTERM:
+                    term = Jj_consume_token(RegexpToken.SUFFIXTERM);
+                    /* ending in * */
+                    if (!AllowedSuffix(term.image))
+                    {
+                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
+                    }
+                    // Substring(0, len - 1) strips the trailing truncator.
+                    { if (true) return GetPrefixQuery(term.image.Substring(0, term.image.Length - 1), false /* not quoted */); }
+                    break;
+                case RegexpToken.TRUNCTERM:
+                    term = Jj_consume_token(RegexpToken.TRUNCTERM);
+                    /* with at least one * or ? */
+                    if (!AllowedTruncation(term.image))
+                    {
+                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
+                    }
+                    { if (true) return GetTruncQuery(term.image); }
+                    break;
+                case RegexpToken.TRUNCQUOTED:
+                    term = Jj_consume_token(RegexpToken.TRUNCQUOTED);
+                    /* eg. "9b-b,m"* */
+                    if ((term.image.Length - 3) < minimumPrefixLength)
+                    {
+                        { if (true) throw new ParseException(truncationErrorMessage + term.image); }
+                    }
+                    // Substring(1, len - 3) strips the leading quote and the
+                    // trailing quote + truncator (Java's substring(1, len - 2)).
+                    { if (true) return GetPrefixQuery(term.image.Substring(1, (term.image.Length - 2) - 1), true /* quoted */); }
+                    break;
+                default:
+                    jj_la1[8] = jj_gen;
+                    Jj_consume_token(-1);
+                    throw new ParseException();
+            }
+            throw new Exception("Missing return statement in function");
+        }
+
+        /// <summary>
+        /// Grammar: ( "^" NUMBER )* — multiplies each boost into
+        /// <paramref name="q"/>'s weight (left associative).
+        /// </summary>
+        /// <exception cref="ParseException">when a boost is unparsable or not positive</exception>
+        public void OptionalWeights(SrndQuery q)
+        {
+            Token weight = null;
+
+            while (true)
+            {
+                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
+                {
+                    case RegexpToken.CARAT:
+                        ;
+                        break;
+                    default:
+                        jj_la1[9] = jj_gen;
+                        goto label_8;
+                }
+                Jj_consume_token(RegexpToken.CARAT);
+                weight = Jj_consume_token(RegexpToken.NUMBER);
+                float f;
+                try
+                {
+                    // Parse with the invariant culture: the NUMBER token always
+                    // uses '.' as the decimal separator, so a culture-sensitive
+                    // parse (e.g. de-DE treating '.' as a group separator) would
+                    // silently misread the boost. Matches Java's
+                    // culture-invariant Float.parseFloat.
+                    f = float.Parse(weight.image, CultureInfo.InvariantCulture);
+                }
+                catch (Exception floatExc)
+                {
+                    { if (true) throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")"); }
+                }
+                if (f <= 0.0)
+                {
+                    { if (true) throw new ParseException(boostErrorMessage + weight.image); }
+                }
+                q.Weight = (f * q.Weight); /* left associative, fwiw */
+            }
+            label_8: ;
+        }
+
+        /// <summary>
+        /// Lookahead helper for OptionalFields: true when the next
+        /// <paramref name="xla"/> tokens match production Jj_3_1 (TERM COLON).
+        /// </summary>
+        private bool Jj_2_1(int xla)
+        {
+            jj_la = xla; jj_lastpos = jj_scanpos = token;
+            // LookaheadSuccess is thrown by Jj_scan_token as an early-success signal.
+            try { return !Jj_3_1(); }
+            catch (LookaheadSuccess) { return true; }
+            finally { Jj_save(0, xla); }
+        }
+
+        /// <summary>Scans for TERM followed by COLON; true means mismatch.</summary>
+        private bool Jj_3_1()
+        {
+            if (Jj_scan_token(RegexpToken.TERM)) return true;
+            if (Jj_scan_token(RegexpToken.COLON)) return true;
+            return false;
+        }
+
+        /// <summary>Generated Token Manager.</summary>
+        public QueryParserTokenManager token_source;
+        /// <summary>Current token.</summary>
+        public Token token;
+        /// <summary>Next token.</summary>
+        public Token jj_nt;
+        private int jj_ntk;                    // kind of the next token, or -1 when not yet fetched
+        private Token jj_scanpos, jj_lastpos;  // cursors used by the lookahead scan
+        private int jj_la;                     // remaining lookahead budget
+        private int jj_gen;                    // generation counter for error bookkeeping
+        private readonly int[] jj_la1 = new int[10];   // generation at which each choice point last failed
+        private static int[] jj_la1_0;                 // expected-token bitmask per choice point
+        static QueryParser()
+        {
+            Jj_la1_init_0();
+        }
+
+        private static void Jj_la1_init_0()
+        {
+            jj_la1_0 = new int[] { 0x100, 0x200, 0x400, 0x1000, 0x800, 0x7c3b00, 0x1b00, 0x8000, 0x7c0000, 0x20000, };
+        }
+        private readonly JJCalls[] jj_2_rtns = new JJCalls[1];
+        private bool jj_rescan = false;
+        private int jj_gc = 0;
+
+        /// <summary>
+        /// Constructor with user supplied CharStream.
+        /// </summary>
+        /// <param name="stream">character stream to tokenize</param>
+        public QueryParser(ICharStream stream)
+        {
+            token_source = new QueryParserTokenManager(stream);
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>
+        /// Reinitialise with a new character stream, resetting all parser state.
+        /// </summary>
+        /// <param name="stream">character stream to tokenize</param>
+        public virtual void ReInit(ICharStream stream)
+        {
+            token_source.ReInit(stream);
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>
+        /// Constructor with generated Token Manager.
+        /// </summary>
+        /// <param name="tm">token manager supplying the token stream</param>
+        public QueryParser(QueryParserTokenManager tm)
+        {
+            token_source = tm;
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>
+        /// Reinitialise with a new token manager, resetting all parser state.
+        /// </summary>
+        /// <param name="tm">token manager supplying the token stream</param>
+        public virtual void ReInit(QueryParserTokenManager tm)
+        {
+            token_source = tm;
+            token = new Token();
+            jj_ntk = -1;
+            jj_gen = 0;
+            for (int i = 0; i < 10; i++) jj_la1[i] = -1;
+            for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
+        }
+
+        /// <summary>
+        /// Consumes the next token and verifies it has the expected kind;
+        /// otherwise restores the previous token and throws a ParseException.
+        /// </summary>
+        private Token Jj_consume_token(int kind)
+        {
+            Token oldToken;
+            if ((oldToken = token).next != null) token = token.next;
+            else token = token.next = token_source.GetNextToken();
+            jj_ntk = -1;
+            if (token.kind == kind)
+            {
+                jj_gen++;
+                // Every 100 consumed tokens, drop stale lookahead records so
+                // the JJCalls chains do not pin old tokens.
+                if (++jj_gc > 100)
+                {
+                    jj_gc = 0;
+                    for (int i = 0; i < jj_2_rtns.Length; i++)
+                    {
+                        JJCalls c = jj_2_rtns[i];
+                        while (c != null)
+                        {
+                            if (c.gen < jj_gen) c.first = null;
+                            c = c.next;
+                        }
+                    }
+                }
+                return token;
+            }
+            token = oldToken;
+            jj_kind = kind;
+            throw GenerateParseException();
+        }
+
+        /// <summary>Control-flow signal used by the lookahead machinery; never escapes the parser.</summary>
+        private sealed class LookaheadSuccess : Exception { }
+        private readonly LookaheadSuccess jj_ls = new LookaheadSuccess();
+
+        /// <summary>
+        /// Advances the lookahead cursor and checks the scanned token's kind.
+        /// Returns true on mismatch; throws jj_ls when the lookahead budget is
+        /// exhausted with everything matching.
+        /// </summary>
+        private bool Jj_scan_token(int kind)
+        {
+            if (jj_scanpos == jj_lastpos)
+            {
+                jj_la--;
+                if (jj_scanpos.next == null)
+                {
+                    jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
+                }
+                else
+                {
+                    jj_lastpos = jj_scanpos = jj_scanpos.next;
+                }
+            }
+            else
+            {
+                jj_scanpos = jj_scanpos.next;
+            }
+            if (jj_rescan)
+            {
+                // During error rescanning, record how far into the stream this
+                // scan reached so the expected-token report is accurate.
+                int i = 0; Token tok = token;
+                while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
+                if (tok != null) Jj_add_error_token(kind, i);
+            }
+            if (jj_scanpos.kind != kind) return true;
+            if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
+            return false;
+        }
+
+        /// <summary>
+        /// Get the next Token, extending the token chain from the token
+        /// manager when necessary.
+        /// </summary>
+        /// <returns>the new current token</returns>
+        public Token GetNextToken()
+        {
+            if (token.next != null) token = token.next;
+            else token = token.next = token_source.GetNextToken();
+            jj_ntk = -1;
+            jj_gen++;
+            return token;
+        }
+
+        /// <summary>
+        /// Get the Token <paramref name="index"/> positions ahead of the
+        /// current one without consuming anything.
+        /// </summary>
+        /// <param name="index">number of tokens to look ahead (0 = current)</param>
+        /// <returns>the token at that position</returns>
+        public Token GetToken(int index)
+        {
+            Token t = token;
+            for (int i = 0; i < index; i++)
+            {
+                if (t.next != null) t = t.next;
+                else t = t.next = token_source.GetNextToken();
+            }
+            return t;
+        }
+
+        /// <summary>Returns (and caches in jj_ntk) the kind of the token following the current one.</summary>
+        private int Jj_ntk()
+        {
+            if ((jj_nt = token.next) == null)
+                return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
+            else
+                return (jj_ntk = jj_nt.kind);
+        }
+
+        // Bookkeeping for building the "expected token sequences" part of a ParseException.
+        private IList<int[]> jj_expentries = new List<int[]>();
+        private int[] jj_expentry;
+        private int jj_kind = -1;
+        private int[] jj_lasttokens = new int[100];
+        private int jj_endpos;
+
+        /// <summary>
+        /// Records a token kind at position <paramref name="pos"/> of the
+        /// current lookahead run for error reporting.
+        /// </summary>
+        private void Jj_add_error_token(int kind, int pos)
+        {
+            if (pos >= 100) return;
+            if (pos == jj_endpos + 1)
+            {
+                jj_lasttokens[jj_endpos++] = kind;
+            }
+            else if (jj_endpos != 0)
+            {
+                jj_expentry = new int[jj_endpos];
+                for (int i = 0; i < jj_endpos; i++)
+                {
+                    jj_expentry[i] = jj_lasttokens[i];
+                }
+                // Port note: the generated Java uses a labeled
+                // "continue jj_entries_loop;" to skip entries whose contents
+                // differ. A bare C# "continue" only restarts the inner loop,
+                // which made the element comparison a no-op and recorded the
+                // entry after the first length match regardless of contents.
+                // Use an explicit flag to reproduce the Java semantics:
+                // record the entry only on an exact element-wise match.
+                foreach (var oldentry in jj_expentries)
+                {
+                    if (oldentry.Length == jj_expentry.Length)
+                    {
+                        bool isMatched = true;
+                        for (int i = 0; i < jj_expentry.Length; i++)
+                        {
+                            if (oldentry[i] != jj_expentry[i])
+                            {
+                                isMatched = false;
+                                break;
+                            }
+                        }
+                        if (isMatched)
+                        {
+                            jj_expentries.Add(jj_expentry);
+                            break;
+                        }
+                    }
+                }
+                if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
+            }
+        }
+
+        /// <summary>
+        /// Generate ParseException: builds the exception from the recorded
+        /// failure kind (jj_kind), the per-choice-point bitmasks in jj_la1_0,
+        /// and the rescanned lookahead token sequences.
+        /// </summary>
+        /// <returns>a ParseException describing the expected token sequences</returns>
+        public virtual ParseException GenerateParseException()
+        {
+            jj_expentries.Clear();
+            bool[] la1tokens = new bool[24]; // one flag per token kind (0..23)
+            if (jj_kind >= 0)
+            {
+                la1tokens[jj_kind] = true;
+                jj_kind = -1;
+            }
+            for (int i = 0; i < 10; i++)
+            {
+                // Choice points that failed in the current generation
+                // contribute their expected-token bitmask.
+                if (jj_la1[i] == jj_gen)
+                {
+                    for (int j = 0; j < 32; j++)
+                    {
+                        if ((jj_la1_0[i] & (1 << j)) != 0)
+                        {
+                            la1tokens[j] = true;
+                        }
+                    }
+                }
+            }
+            for (int i = 0; i < 24; i++)
+            {
+                if (la1tokens[i])
+                {
+                    jj_expentry = new int[1];
+                    jj_expentry[0] = i;
+                    jj_expentries.Add(jj_expentry);
+                }
+            }
+            jj_endpos = 0;
+            Jj_rescan_token();
+            Jj_add_error_token(0, 0);
+            int[][] exptokseq = new int[jj_expentries.Count][];
+            for (int i = 0; i < jj_expentries.Count; i++)
+            {
+                exptokseq[i] = jj_expentries[i];
+            }
+            return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
+        }
+
+        /// <summary>Enable tracing. No-op: this parser was generated without tracing support. </summary>
+        public void Enable_tracing()
+        {
+        }
+
+        /// <summary>Disable tracing. No-op: this parser was generated without tracing support. </summary>
+        public void Disable_tracing()
+        {
+        }
+
+        /// <summary>
+        /// Re-runs the recorded lookahead productions so that tokens they
+        /// touched are included in a ParseException's expected sequences.
+        /// </summary>
+        private void Jj_rescan_token()
+        {
+            jj_rescan = true;
+            for (int i = 0; i < 1; i++)
+            {
+                try
+                {
+                    JJCalls p = jj_2_rtns[i];
+                    do
+                    {
+                        if (p.gen > jj_gen)
+                        {
+                            jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
+                            switch (i)
+                            {
+                                case 0: Jj_3_1(); break;
+                            }
+                        }
+                        p = p.next;
+                    } while (p != null);
+                }
+                catch (LookaheadSuccess)
+                {
+                    // Deliberately ignored: LookaheadSuccess only signals that
+                    // the rescan matched; no recovery is needed. (The exception
+                    // variable was removed to avoid an unused-variable warning.)
+                }
+            }
+            jj_rescan = false;
+        }
+
+        /// <summary>
+        /// Records the outcome of lookahead call <paramref name="index"/>
+        /// (start token, argument, and generation) for later error rescanning.
+        /// </summary>
+        private void Jj_save(int index, int xla)
+        {
+            JJCalls p = jj_2_rtns[index];
+            while (p.gen > jj_gen)
+            {
+                if (p.next == null) { p = p.next = new JJCalls(); break; }
+                p = p.next;
+            }
+            p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
+        }
+
+        /// <summary>Linked-list node holding the state of one recorded lookahead call.</summary>
+        internal sealed class JJCalls
+        {
+            internal int gen;      // generation at which this record expires
+            internal Token first;  // token where the lookahead started
+            internal int arg;      // lookahead limit passed to the Jj_2_x call
+            internal JJCalls next;
+        }
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
new file mode 100644
index 0000000..262f76b
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserConstants.cs
@@ -0,0 +1,120 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+    /// <summary>
+    /// Token kind ids produced by the generated token manager.
+    /// Kind 7 is intentionally absent: it is an unnamed token
+    /// (see "&lt;token of kind 7&gt;" in <c>QueryParserConstants.TokenImage</c>).
+    /// </summary>
+    public static class RegexpToken
+    {
+        /// <summary>End of File. </summary>
+        public const int EOF = 0;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _NUM_CHAR = 1;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _TERM_CHAR = 2;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _WHITESPACE = 3;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _STAR = 4;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _ONE_CHAR = 5;
+        /// <summary>RegularExpression Id. </summary>
+        public const int _DISTOP_NUM = 6;
+        /// <summary>RegularExpression Id. </summary>
+        public const int OR = 8;
+        /// <summary>RegularExpression Id. </summary>
+        public const int AND = 9;
+        /// <summary>RegularExpression Id. </summary>
+        public const int NOT = 10;
+        /// <summary>RegularExpression Id. </summary>
+        public const int W = 11;
+        /// <summary>RegularExpression Id. </summary>
+        public const int N = 12;
+        /// <summary>RegularExpression Id. </summary>
+        public const int LPAREN = 13;
+        /// <summary>RegularExpression Id. </summary>
+        public const int RPAREN = 14;
+        /// <summary>RegularExpression Id. </summary>
+        public const int COMMA = 15;
+        /// <summary>RegularExpression Id. </summary>
+        public const int COLON = 16;
+        /// <summary>RegularExpression Id. </summary>
+        public const int CARAT = 17;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TRUNCQUOTED = 18;
+        /// <summary>RegularExpression Id. </summary>
+        public const int QUOTED = 19;
+        /// <summary>RegularExpression Id. </summary>
+        public const int SUFFIXTERM = 20;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TRUNCTERM = 21;
+        /// <summary>RegularExpression Id. </summary>
+        public const int TERM = 22;
+        /// <summary>RegularExpression Id. </summary>
+        public const int NUMBER = 23;
+    }
+
+    /// <summary>
+    /// Lexical state ids for the generated token manager.
+    /// NOTE(review): id 1 is absent here — presumably another generated state;
+    /// confirm against QueryParserTokenManager.
+    /// </summary>
+    public static class LexicalToken
+    {
+        /// <summary>Lexical state.</summary>
+        public const int Boost = 0;
+        /// <summary>Lexical state.</summary>
+        public const int DEFAULT = 2;
+    }
+
+ // NOTE: In Java, this was an interface. However, in
+ // .NET we cannot define constants in an interface.
+ // So, instead we are making it a static class so it
+ // can be shared between classes with different base classes.
+
+ // public interface QueryParserConstants
+
+    /// <summary> Token literal values and constants.
+    /// Generated by org.javacc.parser.OtherFilesGen#start()
+    /// </summary>
+    public static class QueryParserConstants
+    {
+        /// <summary>Literal token values, indexed by token kind. </summary>
+        // readonly mirrors the implicitly-final interface field of the Java
+        // original and keeps the shared lookup table from being reassigned.
+        public static readonly string[] TokenImage = new string[] {
+            "<EOF>",
+            "<_NUM_CHAR>",
+            "<_TERM_CHAR>",
+            "<_WHITESPACE>",
+            "\"*\"",
+            "\"?\"",
+            "<_DISTOP_NUM>",
+            "<token of kind 7>",
+            "<OR>",
+            "<AND>",
+            "<NOT>",
+            "<W>",
+            "<N>",
+            "\"(\"",
+            "\")\"",
+            "\",\"",
+            "\":\"",
+            "\"^\"",
+            "<TRUNCQUOTED>",
+            "<QUOTED>",
+            "<SUFFIXTERM>",
+            "<TRUNCTERM>",
+            "<TERM>",
+            "<NUMBER>"
+        };
+    }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
new file mode 100644
index 0000000..ac3d611
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
@@ -0,0 +1,760 @@
+\ufeffusing System;
+using System.Diagnostics.CodeAnalysis;
+using System.IO;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ /// <summary>
+ /// Token Manager.
+ /// </summary>
+ public class QueryParserTokenManager //: QueryParserConstants
+ {
+ private void InitBlock()
+ {
+ StreamWriter temp_writer;
+ temp_writer = new StreamWriter(Console.OpenStandardOutput(), Console.Out.Encoding);
+ temp_writer.AutoFlush = true;
+ debugStream = temp_writer;
+ }
+
+ /// <summary>Debug output. </summary>
+ public StreamWriter debugStream;
+ /// <summary>Set debug output. </summary>
+ public virtual void SetDebugStream(StreamWriter ds)
+ {
+ debugStream = ds;
+ }
+ private int JjStopStringLiteralDfa_1(int pos, long active0)
+ {
+ switch (pos)
+ {
+ default:
+ return -1;
+ }
+ }
+ private int JjStartNfa_1(int pos, long active0)
+ {
+ return JjMoveNfa_1(JjStopStringLiteralDfa_1(pos, active0), pos + 1);
+ }
+ private int JjStopAtPos(int pos, int kind)
+ {
+ jjmatchedKind = kind;
+ jjmatchedPos = pos;
+ return pos + 1;
+ }
+ private int jjMoveStringLiteralDfa0_1()
+ {
+ switch (curChar)
+ {
+ case (char)40:
+ return JjStopAtPos(0, 13);
+ case (char)41:
+ return JjStopAtPos(0, 14);
+ case (char)44:
+ return JjStopAtPos(0, 15);
+ case (char)58:
+ return JjStopAtPos(0, 16);
+ case (char)94:
+ return JjStopAtPos(0, 17);
+ default:
+ return JjMoveNfa_1(0, 0);
+ }
+ }
+ internal static readonly ulong[] jjbitVec0 = {
+ 0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
+ };
+ internal static readonly ulong[] jjbitVec2 = {
+ 0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
+ };
+ private int JjMoveNfa_1(int startState, int curPos)
+ {
+ int startsAt = 0;
+ jjnewStateCnt = 38;
+ int i = 1;
+ jjstateSet[0] = startState;
+ int kind = 0x7fffffff;
+ for (; ; )
+ {
+ if (++jjround == 0x7fffffff)
+ ReInitRounds();
+ if (curChar < 64)
+ {
+ ulong l = (ulong)(1L << (int)curChar);
+ do
+ {
+ switch (jjstateSet[--i])
+ {
+ case 0:
+ if ((0x7bffe8faffffd9ffL & l) != 0L)
+ {
+ if (kind > 22)
+ kind = 22;
+ JjCheckNAddStates(0, 4);
+ }
+ else if ((0x100002600L & l) != 0L)
+ {
+ if (kind > 7)
+ kind = 7;
+ }
+ else if (curChar == 34)
+ JjCheckNAddStates(5, 7);
+ if ((0x3fc000000000000L & l) != 0L)
+ JjCheckNAddStates(8, 11);
+ else if (curChar == 49)
+ JjCheckNAddTwoStates(20, 21);
+ break;
+ case 19:
+ if ((0x3fc000000000000L & l) != 0L)
+ JjCheckNAddStates(8, 11);
+ break;
+ case 20:
+ if ((0x3ff000000000000L & l) != 0L)
+ JjCheckNAdd(17);
+ break;
+ case 21:
+ if ((0x3ff000000000000L & l) != 0L)
+ JjCheckNAdd(18);
+ break;
+ case 22:
+ if (curChar == 49)
+ JjCheckNAddTwoStates(20, 21);
+ break;
+ case 23:
+ if (curChar == 34)
+ JjCheckNAddStates(5, 7);
+ break;
+ case 24:
+ if ((0xfffffffbffffffffL & l) != (ulong)0L)
+ JjCheckNAddTwoStates(24, 25);
+ break;
+ case 25:
+ if (curChar == 34)
+ jjstateSet[jjnewStateCnt++] = 26;
+ break;
+ case 26:
+ if (curChar == 42 && kind > 18)
+ kind = 18;
+ break;
+ case 27:
+ if ((0xfffffffbffffffffL & l) != (ulong)0L)
+ JjCheckNAddStates(12, 14);
+ break;
+ case 29:
+ if (curChar == 34)
+ JjCheckNAddStates(12, 14);
+ break;
+ case 30:
+ if (curChar == 34 && kind > 19)
+ kind = 19;
+ break;
+ case 31:
+ if ((0x7bffe8faffffd9ffL & l) == 0L)
+ break;
+ if (kind > 22)
+ kind = 22;
+ JjCheckNAddStates(0, 4);
+ break;
+ case 32:
+ if ((0x7bffe8faffffd9ffL & l) != 0L)
+ JjCheckNAddTwoStates(32, 33);
+ break;
+ case 33:
+ if (curChar == 42 && kind > 20)
+ kind = 20;
+ break;
+ case 34:
+ if ((0x7bffe8faffffd9ffL & l) != 0L)
+ JjCheckNAddTwoStates(34, 35);
+ break;
+ case 35:
+ if ((0x8000040000000000L & l) == (ulong)0L)
+ break;
+ if (kind > 21)
+ kind = 21;
+ JjCheckNAddTwoStates(35, 36);
+ break;
+ case 36:
+ if ((0xfbffecfaffffd9ffL & l) == (ulong)0L)
+ break;
+ if (kind > 21)
+ kind = 21;
+ JjCheckNAdd(36);
+ break;
+ case 37:
+ if ((0x7bffe8faffffd9ffL & l) == 0L)
+ break;
+ if (kind > 22)
+ kind = 22;
+ JjCheckNAdd(37);
+ break;
+ default: break;
+ }
+ } while (i != startsAt);
+ }
+ else if (curChar < 128)
+ {
+ // NOTE: See the note in the Classic.QueryParserTokenManager.cs file.
+ // I am working under the assumption 63 is the correct value, since it
+ // made the tests pass there.
+ ulong l = (ulong)(1L << (curChar & 63));
+ //long l = 1L << (curChar & 077);
+ do
+ {
+ switch (jjstateSet[--i])
+ {
+ case 0:
+ if ((0xffffffffbfffffffL & l) != (ulong)0L)
+ {
+ if (kind > 22)
+ kind = 22;
+ JjCheckNAddStates(0, 4);
+ }
+ if ((0x400000004000L & l) != 0L)
+ {
+ if (kind > 12)
+ kind = 12;
+ }
+ else if ((0x80000000800000L & l) != 0L)
+ {
+ if (kind > 11)
+ kind = 11;
+ }
+ else if (curChar == 97)
+ jjstateSet[jjnewStateCnt++] = 9;
+ else if (curChar == 65)
+ jjstateSet[jjnewStateCnt++] = 6;
+ else if (curChar == 111)
+ jjstateSet[jjnewStateCnt++] = 3;
+ else if (curChar == 79)
+ jjstateSet[jjnewStateCnt++] = 1;
+ if (curChar == 110)
+ jjstateSet[jjnewStateCnt++] = 15;
+ else if (curChar == 78)
+ jjstateSet[jjnewStateCnt++] = 12;
+ break;
+ case 1:
+ if (curChar == 82 && kind > 8)
+ kind = 8;
+ break;
+ case 2:
+ if (curChar == 79)
+ jjstateSet[jjnewStateCnt++] = 1;
+ break;
+ case 3:
+ if (curChar == 114 && kind > 8)
+ kind = 8;
+ break;
+ case 4:
+ if (curChar == 111)
+ jjstateSet[jjnewStateCnt++] = 3;
+ break;
+ case 5:
+ if (curChar == 68 && kind > 9)
+ kind = 9;
+ break;
+ case 6:
+ if (curChar == 78)
+ jjstateSet[jjnewStateCnt++] = 5;
+ break;
+ case 7:
+ if (curChar == 65)
+ jjstateSet[jjnewStateCnt++] = 6;
+ break;
+ case 8:
+ if (curChar == 100 && kind > 9)
+ kind = 9;
+ break;
+ case 9:
+ if (curChar == 110)
+ jjstateSet[jjnewStateCnt++] = 8;
+ break;
+ case 10:
+ if (curChar == 97)
+ jjstateSet[jjnewStateCnt++] = 9;
+ break;
+ case 11:
+ if (curChar == 84 && kind > 10)
+ kind = 10;
+ break;
+ case 12:
+ if (curChar == 79)
+ jjstateSet[jjnewStateCnt++] = 11;
+ break;
+ case 13:
+ if (curChar == 78)
+ jjstateSet[jjnewStateCnt++] = 12;
+ break;
+ case 14:
+ if (curChar == 116 && kind > 10)
+ kind = 10;
+ break;
+ case 15:
+ if (curChar == 111)
+ jjstateSet[jjnewStateCnt++] = 14;
+ break;
+ case 16:
+ if (curChar == 110)
+ jjstateSet[jjnewStateCnt++] = 15;
+ break;
+ case 17:
+ if ((0x80000000800000L & l) != 0L && kind > 11)
+ kind = 11;
+ break;
+ case 18:
+ if ((0x400000004000L & l) != 0L && kind > 12)
+ kind = 12;
+ break;
+ case 24:
+ JjAddStates(15, 16);
+ break;
+ case 27:
+ if ((0xffffffffefffffffL & l) != (ulong)0L)
+ JjCheckNAddStates(12, 14);
+ break;
+ case 28:
+ if (curChar == 92)
+ jjstateSet[jjnewStateCnt++] = 29;
+ break;
+ case 29:
+ if (curChar == 92)
+ JjCheckNAddStates(12, 14);
+ break;
+ case 31:
+ if ((0xffffffffbfffffffL & l) == (ulong)0L)
+ break;
+ if (kind > 22)
+ kind = 22;
+ JjCheckNAddStates(0, 4);
+ break;
+ case 32:
+ if ((0xffffffffbfffffffL & l) != (ulong)0L)
+ JjCheckNAddTwoStates(32, 33);
+ break;
+ case 34:
+ if ((0xffffffffbfffffffL & l) != (ulong)0L)
+ JjCheckNAddTwoStates(34, 35);
+ break;
+ case 36:
+ if ((0xffffffffbfffffffL & l) == (ulong)0L)
+ break;
+ if (kind > 21)
+ kind = 21;
+ jjstateSet[jjnewStateCnt++] = 36;
+ break;
+ case 37:
+ if ((0xffffffffbfffffffL & l) == (ulong)0L)
+ break;
+ if (kind > 22)
+ kind = 22;
+ JjCheckNAdd(37);
+ break;
+ default: break;
+ }
+ } while (i != startsAt);
+ }
+ else
+ {
+ int hiByte = (int)(curChar >> 8);
+ int i1 = hiByte >> 6;
+ //long l1 = 1L << (hiByte & 077);
+ ulong l1 = (ulong)(1L << (hiByte & 63));
+ int i2 = (curChar & 0xff) >> 6;
+ //long l2 = 1L << (curChar & 077);
+ ulong l2 = (ulong)(1L << (curChar & 63));
+ do
+ {
+ switch (jjstateSet[--i])
+ {
+ case 0:
+ if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
+ break;
+ if (kind > 22)
+ kind = 22;
+ JjCheckNAddStates(0, 4);
+ break;
+ case 24:
+ if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+ JjAddStates(15, 16);
+ break;
+ case 27:
+ if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+ JjAddStates(12, 14);
+ break;
+ case 32:
+ if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+ JjCheckNAddTwoStates(32, 33);
+ break;
+ case 34:
+ if (JjCanMove_0(hiByte, i1, i2, l1, l2))
+ JjCheckNAddTwoStates(34, 35);
+ break;
+ case 36:
+ if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
+ break;
+ if (kind > 21)
+ kind = 21;
+ jjstateSet[jjnewStateCnt++] = 36;
+ break;
+ case 37:
+ if (!JjCanMove_0(hiByte, i1, i2, l1, l2))
+ break;
+ if (kind > 22)
+ kind = 22;
+ JjCheckNAdd(37);
+ break;
+ default: break;
+ }
+ } while (i != startsAt);
+ }
+ if (kind != 0x7fffffff)
+ {
+ jjmatchedKind = kind;
+ jjmatchedPos = curPos;
+ kind = 0x7fffffff;
+ }
+ ++curPos;
+ if ((i = jjnewStateCnt) == (startsAt = 38 - (jjnewStateCnt = startsAt)))
+ return curPos;
+ try { curChar = input_stream.ReadChar(); }
+ catch (System.IO.IOException e) { return curPos; }
+ }
+ }
+
+ private int JjMoveStringLiteralDfa0_0()
+ {
+ return JjMoveNfa_0(0, 0);
+ }
+ private int JjMoveNfa_0(int startState, int curPos)
+ {
+ int startsAt = 0;
+ jjnewStateCnt = 3;
+ int i = 1;
+ jjstateSet[0] = startState;
+ int kind = 0x7fffffff;
+ for (; ; )
+ {
+ if (++jjround == 0x7fffffff)
+ ReInitRounds();
+ if (curChar < 64)
+ {
+ long l = 1L << curChar;
+ do
+ {
+ switch (jjstateSet[--i])
+ {
+ case 0:
+ if ((0x3ff000000000000L & l) == 0L)
+ break;
+ if (kind > 23)
+ kind = 23;
+ JjAddStates(17, 18);
+ break;
+ case 1:
+ if (curChar == 46)
+ JjCheckNAdd(2);
+ break;
+ case 2:
+ if ((0x3ff000000000000L & l) == 0L)
+ break;
+ if (kind > 23)
+ kind = 23;
+ JjCheckNAdd(2);
+ break;
+ default: break;
+ }
+ } while (i != startsAt);
+ }
+ else if (curChar < 128)
+ {
+ //long l = 1L << (curChar & 077);
+ ulong l = (ulong)(1L << (curChar & 63));
+ do
+ {
+ switch (jjstateSet[--i])
+ {
+ default: break;
+ }
+ } while (i != startsAt);
+ }
+ else
+ {
+ int hiByte = (int)(curChar >> 8);
+ int i1 = hiByte >> 6;
+ //long l1 = 1L << (hiByte & 077);
+ ulong l1 = (ulong)(1L << (hiByte & 63));
+ int i2 = (curChar & 0xff) >> 6;
+ //long l2 = 1L << (curChar & 077);
+ ulong l2 = (ulong)(1L << (curChar & 63));
+ do
+ {
+ switch (jjstateSet[--i])
+ {
+ default: break;
+ }
+ } while (i != startsAt);
+ }
+ if (kind != 0x7fffffff)
+ {
+ jjmatchedKind = kind;
+ jjmatchedPos = curPos;
+ kind = 0x7fffffff;
+ }
+ ++curPos;
+ if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
+ return curPos;
+ try { curChar = input_stream.ReadChar(); }
+ catch (System.IO.IOException e) { return curPos; }
+ }
+ }
+ internal static readonly int[] jjnextStates = {
+ 32, 33, 34, 35, 37, 24, 27, 28, 20, 17, 21, 18, 27, 28, 30, 24,
+ 25, 0, 1,
+ };
+ private static bool JjCanMove_0(int hiByte, int i1, int i2, ulong l1, ulong l2)
+ {
+ switch (hiByte)
+ {
+ case 0:
+ return ((jjbitVec2[i2] & l2) != 0L);
+ default:
+ if ((jjbitVec0[i1] & l1) != 0L)
+ return true;
+ return false;
+ }
+ }
+
+ /** Token literal values. */
+ //public static readonly string[] jjstrLiteralImages = {
+ // "", null, null, null, null, null, null, null, null, null, null, null, null,
+ // "\50", "\51", "\54", "\72", "\136", null, null, null, null, null, null
+ //};
+
+ public static readonly string[] jjstrLiteralImages = {
+ "", null, null, null, null, null, null, null, null, null, null, null, null,
+ "\x0028" /*"\50"*/, "\x0029" /*"\51"*/, "\x002C" /*"\54"*/, "\x003A" /*"\72"*/, "\x005E" /*"\136"*/, null, null, null, null, null, null
+ };
+
+ /** Lexer state names. */
+ public static readonly string[] lexStateNames = {
+ "Boost",
+ "DEFAULT"
+ };
+
+ /** Lex State array. */
+ public static readonly int[] jjnewLexState = {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1, -1, -1, -1, -1, 1,
+ };
+ internal static readonly long[] jjtoToken = {
+ 0xffff01L,
+ };
+ internal static readonly long[] jjtoSkip = {
+ 0x80L,
+ };
+ protected ICharStream input_stream;
+ private readonly uint[] jjrounds = new uint[38];
+ private readonly int[] jjstateSet = new int[76];
+ protected internal char curChar;
+
+ /** Constructor. */
+ public QueryParserTokenManager(ICharStream stream)
+ {
+ InitBlock();
+ input_stream = stream;
+ }
+
+ /** Constructor. */
+ public QueryParserTokenManager(ICharStream stream, int lexState)
+ : this(stream)
+ {
+ SwitchTo(lexState);
+ }
+
+ /** Reinitialise parser. */
+ public void ReInit(ICharStream stream)
+ {
+ jjmatchedPos = jjnewStateCnt = 0;
+ curLexState = defaultLexState;
+ input_stream = stream;
+ ReInitRounds();
+ }
+ private void ReInitRounds()
+ {
+ int i;
+ jjround = 0x80000001;
+ for (i = 38; i-- > 0; )
+ jjrounds[i] = 0x80000000;
+ }
+
+ /** Reinitialise parser. */
+ public void ReInit(ICharStream stream, int lexState)
+ {
+ ReInit(stream);
+ SwitchTo(lexState);
+ }
+
+ /** Switch to specified lex state. */
+ public void SwitchTo(int lexState)
+ {
+ if (lexState >= 2 || lexState < 0)
+ throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
+ else
+ curLexState = lexState;
+ }
+
+ protected Token JjFillToken()
+ {
+ Token t;
+ string curTokenImage;
+ int beginLine;
+ int endLine;
+ int beginColumn;
+ int endColumn;
+ string im = jjstrLiteralImages[jjmatchedKind];
+ curTokenImage = (im == null) ? input_stream.Image : im;
+ beginLine = input_stream.BeginLine;
+ beginColumn = input_stream.BeginColumn;
+ endLine = input_stream.EndLine;
+ endColumn = input_stream.EndColumn;
+ t = Token.NewToken(jjmatchedKind, curTokenImage);
+
+ t.beginLine = beginLine;
+ t.endLine = endLine;
+ t.beginColumn = beginColumn;
+ t.endColumn = endColumn;
+
+ return t;
+ }
+
+ internal int curLexState = 1;
+ internal int defaultLexState = 1;
+ internal int jjnewStateCnt;
+ internal uint jjround;
+ internal int jjmatchedPos;
+ internal int jjmatchedKind;
+
+ /// <summary>Get the next Token.</summary>
+ [SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
+ public Token GetNextToken()
+ {
+ Token matchedToken;
+ int curPos = 0;
+
+ for (; ; )
+ {
+ try
+ {
+ curChar = input_stream.BeginToken();
+ }
+ catch (System.IO.IOException e)
+ {
+ jjmatchedKind = 0;
+ matchedToken = JjFillToken();
+ return matchedToken;
+ }
+
+ switch (curLexState)
+ {
+ case 0:
+ jjmatchedKind = 0x7fffffff;
+ jjmatchedPos = 0;
+ curPos = JjMoveStringLiteralDfa0_0();
+ break;
+ case 1:
+ jjmatchedKind = 0x7fffffff;
+ jjmatchedPos = 0;
+ curPos = jjMoveStringLiteralDfa0_1();
+ break;
+ }
+ if (jjmatchedKind != 0x7fffffff)
+ {
+ if (jjmatchedPos + 1 < curPos)
+ input_stream.Backup(curPos - jjmatchedPos - 1);
+ if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
+ {
+ matchedToken = JjFillToken();
+ if (jjnewLexState[jjmatchedKind] != -1)
+ curLexState = jjnewLexState[jjmatchedKind];
+ return matchedToken;
+ }
+ else
+ {
+ if (jjnewLexState[jjmatchedKind] != -1)
+ curLexState = jjnewLexState[jjmatchedKind];
+ goto EOFLoop;
+ }
+ }
+ int error_line = input_stream.EndLine;
+ int error_column = input_stream.EndColumn;
+ string error_after = null;
+ bool EOFSeen = false;
+ try { input_stream.ReadChar(); input_stream.Backup(1); }
+ catch (System.IO.IOException e1)
+ {
+ EOFSeen = true;
+ error_after = curPos <= 1 ? "" : input_stream.Image;
+ if (curChar == '\n' || curChar == '\r')
+ {
+ error_line++;
+ error_column = 0;
+ }
+ else
+ error_column++;
+ }
+ if (!EOFSeen)
+ {
+ input_stream.Backup(1);
+ error_after = curPos <= 1 ? "" : input_stream.Image;
+ }
+ throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
+ EOFLoop: ;
+ }
+ }
+
+ private void JjCheckNAdd(int state)
+ {
+ if (jjrounds[state] != jjround)
+ {
+ jjstateSet[jjnewStateCnt++] = state;
+ jjrounds[state] = jjround;
+ }
+ }
+ private void JjAddStates(int start, int end)
+ {
+ do
+ {
+ jjstateSet[jjnewStateCnt++] = jjnextStates[start];
+ } while (start++ != end);
+ }
+ private void JjCheckNAddTwoStates(int state1, int state2)
+ {
+ JjCheckNAdd(state1);
+ JjCheckNAdd(state2);
+ }
+
+ private void JjCheckNAddStates(int start, int end)
+ {
+ do
+ {
+ JjCheckNAdd(jjnextStates[start]);
+ } while (start++ != end);
+ }
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs b/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
new file mode 100644
index 0000000..2d9b83d
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
@@ -0,0 +1,142 @@
+\ufeffusing System;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ /// <summary>
+ /// Describes the input token stream.
+ /// </summary>
+ [Serializable]
+ public class Token
+ {
+
+ /// <summary>
+ /// An integer that describes the kind of this token. This numbering
+ /// system is determined by JavaCCParser, and a table of these numbers is
+ /// stored in the file ...Constants.java.
+ /// </summary>
+ public int kind;
+
+ /// <summary>The line number of the first character of this Token. </summary>
+ public int beginLine;
+ /// <summary>The column number of the first character of this Token. </summary>
+ public int beginColumn;
+ /// <summary>The line number of the last character of this Token. </summary>
+ public int endLine;
+ /// <summary>The column number of the last character of this Token. </summary>
+ public int endColumn;
+
+ /// <summary>The string image of the token.</summary>
+ public string image;
+
+ /// <summary>
+ /// A reference to the next regular (non-special) token from the input
+ /// stream. If this is the last token from the input stream, or if the
+ /// token manager has not read tokens beyond this one, this field is
+ /// set to null. This is true only if this token is also a regular
+ /// token. Otherwise, see below for a description of the contents of
+ /// this field.
+ /// </summary>
+ public Token next;
+
+ /// <summary>
+ /// This field is used to access special tokens that occur prior to this
+ /// token, but after the immediately preceding regular (non-special) token.
+ /// If there are no such special tokens, this field is set to null.
+ /// When there are more than one such special token, this field refers
+ /// to the last of these special tokens, which in turn refers to the next
+ /// previous special token through its specialToken field, and so on
+ /// until the first special token (whose specialToken field is null).
+ /// The next fields of special tokens refer to other special tokens that
+ /// immediately follow it (without an intervening regular token). If there
+ /// is no such token, this field is null.
+ /// </summary>
+ public Token specialToken;
+
+ /// <summary>
+ /// An optional attribute value of the Token.
+ /// Tokens which are not used as syntactic sugar will often contain
+ /// meaningful values that will be used later on by the compiler or
+ /// interpreter. This attribute value is often different from the image.
+ /// Any subclass of Token that actually wants to return a non-null value can
+ /// override this method as appropriate.
+ /// </summary>
+ public virtual object Value
+ {
+ get { return null; }
+ }
+
+ /// <summary>
+ /// No-argument constructor
+ /// </summary>
+ public Token()
+ {
+ }
+
+ /// <summary>
+ /// Constructs a new token for the specified Image.
+ /// </summary>
+ public Token(int kind)
+ : this(kind, null)
+ {
+ }
+
+ /// <summary>
+ /// Constructs a new token for the specified Image and Kind.
+ /// </summary>
+ public Token(int kind, string image)
+ {
+ this.kind = kind;
+ this.image = image;
+ }
+
+ /// <summary>
+ /// Returns the image.
+ /// </summary>
+ public override string ToString()
+ {
+ return image;
+ }
+
+ /// <summary>
+ /// Returns a new Token object, by default. However, if you want, you
+ /// can create and return subclass objects based on the value of ofKind.
+ /// Simply add the cases to the switch for all those special cases.
+ /// For example, if you have a subclass of Token called IDToken that
+ /// you want to create if ofKind is ID, simply add something like :
+ ///
+ /// case MyParserConstants.ID : return new IDToken(ofKind, image);
+ ///
+ /// to the following switch statement. Then you can cast matchedToken
+ /// variable to the appropriate type and use sit in your lexical actions.
+ /// </summary>
+ public static Token NewToken(int ofKind, string image)
+ {
+ switch (ofKind)
+ {
+ default: return new Token(ofKind, image);
+ }
+ }
+
+ public static Token NewToken(int ofKind)
+ {
+ return NewToken(ofKind, null);
+ }
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
new file mode 100644
index 0000000..2ccfc58
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/TokenMgrError.cs
@@ -0,0 +1,170 @@
+\ufeffusing System;
+using System.Text;
+
+namespace Lucene.Net.QueryParser.Surround.Parser
+{
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ /// <summary>Token Manager Error. </summary>
+ [Serializable]
+ public class TokenMgrError : Exception
+ {
+ /*
+ * Ordinals for various reasons why an Error of this type can be thrown.
+ */
+
+ /// <summary> Lexical error occurred.</summary>
+ internal const int LEXICAL_ERROR = 0;
+
+ /// <summary> An attempt was made to create a second instance of a static token manager.</summary>
+ internal const int STATIC_LEXER_ERROR = 1;
+
+ /// <summary> Tried to change to an invalid lexical state.</summary>
+ internal const int INVALID_LEXICAL_STATE = 2;
+
+ /// <summary> Detected (and bailed out of) an infinite loop in the token manager.</summary>
+ internal const int LOOP_DETECTED = 3;
+
+ /// <summary> Indicates the reason why the exception is thrown. It will have
+ /// one of the above 4 values.
+ /// </summary>
+ internal int errorCode;
+
+ /// <summary>
+ /// Replaces unprintable characters by their escaped (or unicode escaped)
+ /// equivalents in the given string
+ /// </summary>
+ protected internal static string AddEscapes(string str)
+ {
+ StringBuilder retval = new StringBuilder();
+ char ch;
+ for (int i = 0; i < str.Length; i++)
+ {
+ switch (str[i])
+ {
+
+ case (char)(0):
+ continue;
+
+ case '\b':
+ retval.Append("\\b");
+ continue;
+
+ case '\t':
+ retval.Append("\\t");
+ continue;
+
+ case '\n':
+ retval.Append("\\n");
+ continue;
+
+ case '\f':
+ retval.Append("\\f");
+ continue;
+
+ case '\r':
+ retval.Append("\\r");
+ continue;
+
+ case '\"':
+ retval.Append("\\\"");
+ continue;
+
+ case '\'':
+ retval.Append("\\\'");
+ continue;
+
+ case '\\':
+ retval.Append("\\\\");
+ continue;
+
+ default:
+ if ((ch = str[i]) < 0x20 || ch > 0x7e)
+ {
+ string s = "0000" + Convert.ToString(ch, 16);
+ retval.Append("\\u" + s.Substring(s.Length - 4, (s.Length) - (s.Length - 4)));
+ }
+ else
+ {
+ retval.Append(ch);
+ }
+ continue;
+
+ }
+ }
+ return retval.ToString();
+ }
+
+ /// <summary>
+ /// Returns a detailed message for the Error when it is thrown by the
+ /// token manager to indicate a lexical error.
+ /// </summary>
+ /// <remarks>You can customize the lexical error message by modifying this method.</remarks>
+ /// <param name="EOFSeen">indicates if EOF caused the lexical error</param>
+ /// <param name="lexState">lexical state in which this error occurred</param>
+ /// <param name="errorLine">line number when the error occurred</param>
+ /// <param name="errorColumn">column number when the error occurred</param>
+ /// <param name="errorAfter">prefix that was seen before this error occurred</param>
+ /// <param name="curChar">the offending character</param>
+ /// <returns>Detailed error message</returns>
+ protected internal static string LexicalError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar)
+ {
+ return ("Lexical error at line " +
+ errorLine + ", column " +
+ errorColumn + ". Encountered: " +
+ (EOFSeen ? "<EOF> " : ("\"" + AddEscapes(Convert.ToString(curChar)) + "\"") + " (" + (int)curChar + "), ") +
+ "after : \"" + AddEscapes(errorAfter) + "\"");
+ }
+
+ /// <summary>
+ /// You can also modify the body of this method to customize your error messages.
+ /// For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
+ /// of end-users concern, so you can return something like :
+ ///
+ /// "Internal Error : Please file a bug report .... "
+ ///
+ /// from this method for such cases in the release version of your parser.
+ /// </summary>
+ public override string Message
+ {
+ get { return base.Message; }
+ }
+
+ /*
+ * Constructors of various flavors follow.
+ */
+
+ /// <summary>No arg constructor. </summary>
+ public TokenMgrError()
+ {
+ }
+
+ /// <summary>Constructor with message and reason. </summary>
+ public TokenMgrError(string message, int reason)
+ : base(message)
+ {
+ errorCode = reason;
+ }
+
+ /// <summary>Full Constructor. </summary>
+ public TokenMgrError(bool EOFSeen, int lexState, int errorLine, int errorColumn, string errorAfter, char curChar, int reason)
+ : this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason)
+ {
+ }
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs b/src/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
new file mode 100644
index 0000000..aa00e0d
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/AndQuery.cs
@@ -0,0 +1,39 @@
+\ufeffusing Lucene.Net.Search;
+using System.Collections.Generic;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ /// <summary>
+ /// Factory for conjunctions
+ /// </summary>
+ public class AndQuery : ComposedQuery
+ {
+ public AndQuery(IEnumerable<SrndQuery> queries, bool inf, string opName)
+ : base(queries, inf, opName)
+ {
+ }
+
+ public override Search.Query MakeLuceneQueryFieldNoBoost(string fieldName, BasicQueryFactory qf)
+ {
+ return SrndBooleanQuery.MakeBooleanQuery( /* subqueries can be individually boosted */
+ MakeLuceneSubQueriesField(fieldName, qf), BooleanClause.Occur.MUST);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/679ad24c/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
new file mode 100644
index 0000000..8992746
--- /dev/null
+++ b/src/Lucene.Net.QueryParser/Surround/Query/BasicQueryFactory.cs
@@ -0,0 +1,110 @@
+\ufeffusing Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Search.Spans;
+using System.Runtime.CompilerServices;
+
+namespace Lucene.Net.QueryParser.Surround.Query
+{
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+ // Create basic queries to be used during rewrite.
+ // The basic queries are TermQuery and SpanTermQuery.
+ // An exception can be thrown when too many of these are used.
+ // SpanTermQuery and TermQuery use IndexReader.termEnum(Term), which causes the buffer usage.
+
+ // Use this class to limit the buffer usage for reading terms from an index.
+ // Default is 1024, the same as the max. number of subqueries for a BooleanQuery.
+
+
+
+ /// <summary>
+ /// Factory for the basic term queries used during rewrite:
+ /// <see cref="TermQuery"/> and <see cref="SpanTermQuery"/>, both of which
+ /// read terms from the index and therefore consume buffer space.
+ /// The factory caps how many basic queries may be created; once the cap is
+ /// reached, <see cref="CheckMax"/> throws <see cref="TooManyBasicQueries"/>.
+ /// The default cap is 1024, the same as the maximum number of subqueries
+ /// for a BooleanQuery.
+ /// </summary>
+ public class BasicQueryFactory
+ {
+     /// <summary>
+     /// Creates a factory that allows at most <paramref name="maxBasicQueries"/>
+     /// basic queries to be created.
+     /// </summary>
+     /// <param name="maxBasicQueries">upper bound on created basic queries</param>
+     public BasicQueryFactory(int maxBasicQueries)
+     {
+         this.maxBasicQueries = maxBasicQueries;
+         this.queriesMade = 0;
+     }
+
+     /// <summary>
+     /// Creates a factory with the default limit of 1024 basic queries.
+     /// </summary>
+     public BasicQueryFactory()
+         : this(1024)
+     {
+     }
+
+     // The limit never changes after construction, so mark it readonly.
+     private readonly int maxBasicQueries;
+     private int queriesMade;
+
+     /// <summary>Number of basic queries created so far.</summary>
+     public int NrQueriesMade { get { return queriesMade; } }
+
+     /// <summary>Maximum number of basic queries this factory may create.</summary>
+     public int MaxBasicQueries { get { return maxBasicQueries; } }
+
+     public override string ToString()
+     {
+         return GetType().Name
+             + "(maxBasicQueries: " + maxBasicQueries
+             + ", queriesMade: " + queriesMade
+             + ")";
+     }
+
+     // NOTE(review): queriesMade is read here without synchronization while
+     // CheckMax() is synchronized; a stale read appears to be tolerated by
+     // design — confirm against the upstream Java source.
+     private bool AtMax
+     {
+         get { return queriesMade >= maxBasicQueries; }
+     }
+
+     /// <summary>
+     /// Throws <see cref="TooManyBasicQueries"/> when the limit has been
+     /// reached; otherwise counts one more basic query.
+     /// </summary>
+     /// <exception cref="TooManyBasicQueries">when the limit is reached</exception>
+     [MethodImpl(MethodImplOptions.Synchronized)]
+     protected virtual void CheckMax()
+     {
+         if (AtMax)
+             throw new TooManyBasicQueries(MaxBasicQueries);
+         queriesMade++;
+     }
+
+     /// <summary>Creates a new <see cref="TermQuery"/>, counting it against the limit.</summary>
+     public TermQuery NewTermQuery(Term term)
+     {
+         CheckMax();
+         return new TermQuery(term);
+     }
+
+     /// <summary>Creates a new <see cref="SpanTermQuery"/>, counting it against the limit.</summary>
+     public SpanTermQuery NewSpanTermQuery(Term term)
+     {
+         CheckMax();
+         return new SpanTermQuery(term);
+     }
+
+     // CAUTION: the hash depends on mutable state (AtMax), so it can change
+     // over the object's lifetime; do not use instances as dictionary keys
+     // while queries are still being made.
+     public override int GetHashCode()
+     {
+         return GetType().GetHashCode() ^ (AtMax ? 7 : 31 * 32);
+     }
+
+     /// <summary>
+     /// Two BasicQueryFactory's are equal when they generate
+     /// the same types of basic queries, or both cannot generate queries anymore.
+     /// </summary>
+     /// <param name="obj">the object to compare with</param>
+     /// <returns>true when <paramref name="obj"/> is a BasicQueryFactory in the same saturation state</returns>
+     public override bool Equals(object obj)
+     {
+         BasicQueryFactory other = obj as BasicQueryFactory;
+         if (other == null)
+             return false;
+         return AtMax == other.AtMax;
+     }
+ }
+}