You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ni...@apache.org on 2017/02/01 05:19:12 UTC
[13/15] lucenenet git commit: Lucene.Net.QueryParser: changed public
fields into properties or marked them private. Added SafeTextWriterWrapper to
debugStreams.
Lucene.Net.QueryParser: changed public fields into properties or marked them private. Added SafeTextWriterWrapper to debugStreams.
Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/13db3e69
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/13db3e69
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/13db3e69
Branch: refs/heads/api-work
Commit: 13db3e6996be8bace00ba0b3cb35d017d4fe7cdc
Parents: 7667d56
Author: Shad Storhaug <sh...@shadstorhaug.com>
Authored: Wed Feb 1 11:54:56 2017 +0700
Committer: Shad Storhaug <sh...@shadstorhaug.com>
Committed: Wed Feb 1 11:54:56 2017 +0700
----------------------------------------------------------------------
.../Classic/ParseException.cs | 39 ++++--
.../Classic/QueryParser.cs | 88 ++++++-------
.../Classic/QueryParserBase.cs | 16 +--
.../Classic/QueryParserTokenManager.cs | 13 +-
src/Lucene.Net.QueryParser/Classic/Token.cs | 92 ++++++-------
.../Standard/Nodes/NumericRangeQueryNode.cs | 3 +-
.../Flexible/Standard/Parser/ParseException.cs | 39 ++++--
.../Standard/Parser/StandardSyntaxParser.cs | 128 +++++++++----------
.../Parser/StandardSyntaxParserTokenManager.cs | 15 ++-
.../Flexible/Standard/Parser/Token.cs | 22 ++--
.../Surround/Parser/ParseException.cs | 41 ++++--
.../Surround/Parser/QueryParser.cs | 112 ++++++++--------
.../Surround/Parser/QueryParserTokenManager.cs | 15 ++-
.../Surround/Parser/Token.cs | 98 +++++++-------
.../Classic/TestQueryParser.cs | 4 +-
15 files changed, 396 insertions(+), 329 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Classic/ParseException.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/ParseException.cs b/src/Lucene.Net.QueryParser/Classic/ParseException.cs
index 89d2a07..516497b 100644
--- a/src/Lucene.Net.QueryParser/Classic/ParseException.cs
+++ b/src/Lucene.Net.QueryParser/Classic/ParseException.cs
@@ -1,4 +1,6 @@
+using Lucene.Net.Support;
using System;
+using System.Diagnostics.CodeAnalysis;
using System.Text;
namespace Lucene.Net.QueryParsers.Classic
@@ -77,21 +79,40 @@ namespace Lucene.Net.QueryParsers.Classic
/// this object has been created due to a parse error, the token
/// following this token will (therefore) be the first error token.
/// </summary>
- public Token currentToken;
+ public Token CurrentToken
+ {
+ get { return currentToken; }
+ set { currentToken = value; }
+ }
+ private Token currentToken;
/// <summary>
/// Each entry in this array is an array of integers. Each array
/// of integers represents a sequence of tokens (by their ordinal
/// values) that is expected at this point of the parse.
/// </summary>
- public int[][] expectedTokenSequences;
+ [WritableArray]
+ [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
+ public int[][] ExpectedTokenSequences
+ {
+ get { return expectedTokenSequences; }
+ set { expectedTokenSequences = value; }
+ }
+ private int[][] expectedTokenSequences;
/// <summary>
/// This is a reference to the "tokenImage" array of the generated
/// parser within which the parse error occurred. This array is
/// defined in the generated ...Constants interface.
/// </summary>
- public string[] tokenImage;
+ [WritableArray]
+ [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
+ public string[] TokenImage
+ {
+ get { return tokenImage; }
+ set { tokenImage = value; }
+ }
+ private string[] tokenImage;
/// <summary>
@@ -125,23 +146,23 @@ namespace Lucene.Net.QueryParsers.Classic
expected.Append(eol).Append(" ");
}
string retval = "Encountered \"";
- Token tok = currentToken.next;
+ Token tok = currentToken.Next;
for (int i = 0; i < maxSize; i++)
{
if (i != 0)
retval += " ";
- if (tok.kind == 0)
+ if (tok.Kind == 0)
{
retval += tokenImage[0];
break;
}
- retval += (" " + tokenImage[tok.kind]);
+ retval += (" " + tokenImage[tok.Kind]);
retval += " \"";
- retval += Add_escapes(tok.image);
+ retval += Add_escapes(tok.Image);
retval += " \"";
- tok = tok.next;
+ tok = tok.Next;
}
- retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
+ retval += ("\" at line " + currentToken.Next.BeginLine + ", column " + currentToken.Next.BeginColumn);
retval += ("." + eol);
if (expectedTokenSequences.Length == 1)
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
index 3d219c1..179258c 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParser.cs
@@ -269,7 +269,7 @@ namespace Lucene.Net.QueryParsers.Classic
case RegexpToken.TERM:
fieldToken = Jj_consume_token(RegexpToken.TERM);
Jj_consume_token(RegexpToken.COLON);
- field = DiscardEscapeChar(fieldToken.image);
+ field = DiscardEscapeChar(fieldToken.Image);
break;
case RegexpToken.STAR:
Jj_consume_token(RegexpToken.STAR);
@@ -371,7 +371,7 @@ namespace Lucene.Net.QueryParsers.Classic
break;
case RegexpToken.BAREOPER:
term = Jj_consume_token(RegexpToken.BAREOPER);
- term.image = term.image.Substring(0, 1);
+ term.Image = term.Image.Substring(0, 1);
break;
default:
jj_la1[8] = jj_gen;
@@ -487,23 +487,23 @@ namespace Lucene.Net.QueryParsers.Classic
}
bool startOpen = false;
bool endOpen = false;
- if (goop1.kind == RegexpToken.RANGE_QUOTED)
+ if (goop1.Kind == RegexpToken.RANGE_QUOTED)
{
- goop1.image = goop1.image.Substring(1, goop1.image.Length - 2);
+ goop1.Image = goop1.Image.Substring(1, goop1.Image.Length - 2);
}
- else if ("*".Equals(goop1.image))
+ else if ("*".Equals(goop1.Image))
{
startOpen = true;
}
- if (goop2.kind == RegexpToken.RANGE_QUOTED)
+ if (goop2.Kind == RegexpToken.RANGE_QUOTED)
{
- goop2.image = goop2.image.Substring(1, goop2.image.Length - 2);
+ goop2.Image = goop2.Image.Substring(1, goop2.Image.Length - 2);
}
- else if ("*".Equals(goop2.image))
+ else if ("*".Equals(goop2.Image))
{
endOpen = true;
}
- q = GetRangeQuery(field, startOpen ? null : DiscardEscapeChar(goop1.image), endOpen ? null : DiscardEscapeChar(goop2.image), startInc, endInc);
+ q = GetRangeQuery(field, startOpen ? null : DiscardEscapeChar(goop1.Image), endOpen ? null : DiscardEscapeChar(goop2.Image), startInc, endInc);
break;
case RegexpToken.QUOTED:
term = Jj_consume_token(RegexpToken.QUOTED);
@@ -540,7 +540,7 @@ namespace Lucene.Net.QueryParsers.Classic
private bool Jj_2_1(int xla)
{
jj_la = xla;
- jj_lastpos = jj_scanpos = token;
+ jj_lastpos = jj_scanpos = Token;
try
{
return !Jj_3_1();
@@ -582,11 +582,11 @@ namespace Lucene.Net.QueryParsers.Classic
}
/// <summary>Generated Token Manager.</summary>
- public QueryParserTokenManager token_source;
+ public QueryParserTokenManager TokenSource { get; set; }
/// <summary>Current token.</summary>
- public Token token;
+ public Token Token { get; set; }
/// <summary>Next token.</summary>
- public Token jj_nt;
+ public Token Jj_nt { get; set; }
private int jj_ntk;
private Token jj_scanpos, jj_lastpos;
private int jj_la;
@@ -629,8 +629,8 @@ namespace Lucene.Net.QueryParsers.Classic
/// <summary>Constructor with user supplied <see cref="ICharStream"/>. </summary>
protected internal QueryParser(ICharStream stream)
{
- token_source = new QueryParserTokenManager(stream);
- token = new Token();
+ TokenSource = new QueryParserTokenManager(stream);
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 21; i++) jj_la1[i] = -1;
@@ -640,8 +640,8 @@ namespace Lucene.Net.QueryParsers.Classic
/// <summary>Reinitialize. </summary>
public override void ReInit(ICharStream stream)
{
- token_source.ReInit(stream);
- token = new Token();
+ TokenSource.ReInit(stream);
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 21; i++) jj_la1[i] = -1;
@@ -651,8 +651,8 @@ namespace Lucene.Net.QueryParsers.Classic
/// <summary>Constructor with generated Token Manager. </summary>
protected QueryParser(QueryParserTokenManager tm)
{
- token_source = tm;
- token = new Token();
+ TokenSource = tm;
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 21; i++) jj_la1[i] = -1;
@@ -662,8 +662,8 @@ namespace Lucene.Net.QueryParsers.Classic
/// <summary>Reinitialize. </summary>
public virtual void ReInit(QueryParserTokenManager tm)
{
- token_source = tm;
- token = new Token();
+ TokenSource = tm;
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 21; i++) jj_la1[i] = -1;
@@ -673,10 +673,10 @@ namespace Lucene.Net.QueryParsers.Classic
private Token Jj_consume_token(int kind)
{
Token oldToken;
- if ((oldToken = token).next != null) token = token.next;
- else token = token.next = token_source.GetNextToken();
+ if ((oldToken = Token).Next != null) Token = Token.Next;
+ else Token = Token.Next = TokenSource.GetNextToken();
jj_ntk = -1;
- if (token.kind == kind)
+ if (Token.Kind == kind)
{
jj_gen++;
if (++jj_gc > 100)
@@ -692,9 +692,9 @@ namespace Lucene.Net.QueryParsers.Classic
}
}
}
- return token;
+ return Token;
}
- token = oldToken;
+ Token = oldToken;
jj_kind = kind;
throw GenerateParseException();
}
@@ -712,31 +712,31 @@ namespace Lucene.Net.QueryParsers.Classic
if (jj_scanpos == jj_lastpos)
{
jj_la--;
- if (jj_scanpos.next == null)
+ if (jj_scanpos.Next == null)
{
- jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
+ jj_lastpos = jj_scanpos = jj_scanpos.Next = TokenSource.GetNextToken();
}
else
{
- jj_lastpos = jj_scanpos = jj_scanpos.next;
+ jj_lastpos = jj_scanpos = jj_scanpos.Next;
}
}
else
{
- jj_scanpos = jj_scanpos.next;
+ jj_scanpos = jj_scanpos.Next;
}
if (jj_rescan)
{
int i = 0;
- Token tok = token;
+ Token tok = Token;
while (tok != null && tok != jj_scanpos)
{
i++;
- tok = tok.next;
+ tok = tok.Next;
}
if (tok != null) Jj_add_error_token(kind, i);
}
- if (jj_scanpos.kind != kind) return true;
+ if (jj_scanpos.Kind != kind) return true;
if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
return false;
}
@@ -744,31 +744,31 @@ namespace Lucene.Net.QueryParsers.Classic
/// <summary>Get the next Token. </summary>
public Token GetNextToken()
{
- if (token.next != null) token = token.next;
- else token = token.next = token_source.GetNextToken();
+ if (Token.Next != null) Token = Token.Next;
+ else Token = Token.Next = TokenSource.GetNextToken();
jj_ntk = -1;
jj_gen++;
- return token;
+ return Token;
}
/// <summary>Get the specific Token. </summary>
public Token GetToken(int index)
{
- Token t = token;
+ Token t = Token;
for (int i = 0; i < index; i++)
{
- if (t.next != null) t = t.next;
- else t = t.next = token_source.GetNextToken();
+ if (t.Next != null) t = t.Next;
+ else t = t.Next = TokenSource.GetNextToken();
}
return t;
}
private int Jj_ntk()
{
- if ((jj_nt = token.next) == null)
- return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
+ if ((Jj_nt = Token.Next) == null)
+ return (jj_ntk = (Token.Next = TokenSource.GetNextToken()).Kind);
else
- return (jj_ntk = jj_nt.kind);
+ return (jj_ntk = Jj_nt.Kind);
}
private List<int[]> jj_expentries = new List<int[]>();
@@ -857,7 +857,7 @@ namespace Lucene.Net.QueryParsers.Classic
{
exptokseq[i] = jj_expentries[i];
}
- return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
+ return new ParseException(Token, exptokseq, QueryParserConstants.TokenImage);
}
/// <summary>Enable tracing. </summary>
@@ -914,7 +914,7 @@ namespace Lucene.Net.QueryParsers.Classic
p = p.next;
}
p.gen = jj_gen + xla - jj_la;
- p.first = token;
+ p.first = Token;
p.arg = xla;
}
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
index 3fea10d..2a70cf4 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserBase.cs
@@ -806,18 +806,18 @@ namespace Lucene.Net.QueryParsers.Classic
{
Query q;
- string termImage = DiscardEscapeChar(term.image);
+ string termImage = DiscardEscapeChar(term.Image);
if (wildcard)
{
- q = GetWildcardQuery(qfield, term.image);
+ q = GetWildcardQuery(qfield, term.Image);
}
else if (prefix)
{
- q = GetPrefixQuery(qfield, DiscardEscapeChar(term.image.Substring(0, term.image.Length - 1)));
+ q = GetPrefixQuery(qfield, DiscardEscapeChar(term.Image.Substring(0, term.Image.Length - 1)));
}
else if (regexp)
{
- q = GetRegexpQuery(qfield, term.image.Substring(1, term.image.Length - 2));
+ q = GetRegexpQuery(qfield, term.Image.Substring(1, term.Image.Length - 2));
}
else if (fuzzy)
{
@@ -836,7 +836,7 @@ namespace Lucene.Net.QueryParsers.Classic
float fms = FuzzyMinSim;
try
{
- fms = float.Parse(fuzzySlop.image.Substring(1), Locale);
+ fms = float.Parse(fuzzySlop.Image.Substring(1), Locale);
}
catch (Exception /*ignored*/) { }
if (fms < 0.0f)
@@ -859,11 +859,11 @@ namespace Lucene.Net.QueryParsers.Classic
{
try
{
- s = (int)float.Parse(fuzzySlop.image.Substring(1), Locale);
+ s = (int)float.Parse(fuzzySlop.Image.Substring(1), Locale);
}
catch (Exception /*ignored*/) { }
}
- return GetFieldQuery(qfield, DiscardEscapeChar(term.image.Substring(1, term.image.Length - 2)), s);
+ return GetFieldQuery(qfield, DiscardEscapeChar(term.Image.Substring(1, term.Image.Length - 2)), s);
}
// extracted from the .jj grammar
@@ -874,7 +874,7 @@ namespace Lucene.Net.QueryParsers.Classic
float f = (float)1.0;
try
{
- f = float.Parse(boost.image, Locale);
+ f = float.Parse(boost.Image, Locale);
}
catch (Exception /*ignored*/)
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
index 8e5da42..b8f2c17 100644
--- a/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
+++ b/src/Lucene.Net.QueryParser/Classic/QueryParserTokenManager.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Support;
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
@@ -33,11 +34,11 @@ namespace Lucene.Net.QueryParsers.Classic
}
/// <summary>Debug output. </summary>
- public TextWriter debugStream;
+ private TextWriter debugStream; // LUCENENET specific - made private, since we already have a setter
/// <summary>Set debug output. </summary>
public virtual void SetDebugStream(TextWriter ds)
{
- debugStream = ds;
+ debugStream = new SafeTextWriterWrapper(ds);
}
private int JjStopStringLiteralDfa_2(int pos, long active0)
{
@@ -1217,10 +1218,10 @@ namespace Lucene.Net.QueryParsers.Classic
endColumn = m_input_stream.EndColumn;
t = Token.NewToken(jjmatchedKind, curTokenImage);
- t.beginLine = beginLine;
- t.endLine = endLine;
- t.beginColumn = beginColumn;
- t.endColumn = endColumn;
+ t.BeginLine = beginLine;
+ t.EndLine = endLine;
+ t.BeginColumn = beginColumn;
+ t.EndColumn = endColumn;
return t;
}
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Classic/Token.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Classic/Token.cs b/src/Lucene.Net.QueryParser/Classic/Token.cs
index 240693c..ad24d2c 100644
--- a/src/Lucene.Net.QueryParser/Classic/Token.cs
+++ b/src/Lucene.Net.QueryParser/Classic/Token.cs
@@ -33,53 +33,53 @@ namespace Lucene.Net.QueryParsers.Classic
/// system is determined by JavaCCParser, and a table of these numbers is
/// stored in the file ...Constants.java.
/// </summary>
- public int kind;
-
- /// <summary>The line number of the first character of this Token. </summary>
- public int beginLine;
- /// <summary>The column number of the first character of this Token. </summary>
- public int beginColumn;
- /// <summary>The line number of the last character of this Token. </summary>
- public int endLine;
- /// <summary>The column number of the last character of this Token. </summary>
- public int endColumn;
-
- /// <summary>The string image of the token.</summary>
- public string image;
-
- /// <summary>
+ public int Kind { get; set; }
+
+ /// <summary>The line number of the first character of this Token. </summary>
+ public int BeginLine { get; set; }
+ /// <summary>The column number of the first character of this Token. </summary>
+ public int BeginColumn { get; set; }
+ /// <summary>The line number of the last character of this Token. </summary>
+ public int EndLine { get; set; }
+ /// <summary>The column number of the last character of this Token. </summary>
+ public int EndColumn { get; set; }
+
+ /// <summary>The string image of the token.</summary>
+ public string Image { get; set; }
+
+ /// <summary>
/// A reference to the next regular (non-special) token from the input
- /// stream. If this is the last token from the input stream, or if the
- /// token manager has not read tokens beyond this one, this field is
- /// set to null. This is true only if this token is also a regular
- /// token. Otherwise, see below for a description of the contents of
- /// this field.
- /// </summary>
- public Token next;
-
- /// <summary>
+ /// stream. If this is the last token from the input stream, or if the
+ /// token manager has not read tokens beyond this one, this field is
+ /// set to null. This is true only if this token is also a regular
+ /// token. Otherwise, see below for a description of the contents of
+ /// this field.
+ /// </summary>
+ public Token Next { get; set; }
+
+ /// <summary>
/// This field is used to access special tokens that occur prior to this
- /// token, but after the immediately preceding regular (non-special) token.
- /// If there are no such special tokens, this field is set to null.
- /// When there are more than one such special token, this field refers
- /// to the last of these special tokens, which in turn refers to the next
- /// previous special token through its specialToken field, and so on
- /// until the first special token (whose specialToken field is null).
- /// The next fields of special tokens refer to other special tokens that
- /// immediately follow it (without an intervening regular token). If there
- /// is no such token, this field is null.
- /// </summary>
- public Token specialToken;
+ /// token, but after the immediately preceding regular (non-special) token.
+ /// If there are no such special tokens, this field is set to null.
+ /// When there are more than one such special token, this field refers
+ /// to the last of these special tokens, which in turn refers to the next
+ /// previous special token through its specialToken field, and so on
+ /// until the first special token (whose specialToken field is null).
+ /// The next fields of special tokens refer to other special tokens that
+ /// immediately follow it (without an intervening regular token). If there
+ /// is no such token, this field is null.
+ /// </summary>
+ public Token SpecialToken { get; set; }
- /// <summary>
+ /// <summary>
/// An optional attribute value of the Token.
- /// Tokens which are not used as syntactic sugar will often contain
- /// meaningful values that will be used later on by the compiler or
- /// interpreter. This attribute value is often different from the image.
- /// Any subclass of Token that actually wants to return a non-null value can
- /// override this method as appropriate.
- /// </summary>
- public virtual object Value
+ /// Tokens which are not used as syntactic sugar will often contain
+ /// meaningful values that will be used later on by the compiler or
+ /// interpreter. This attribute value is often different from the image.
+ /// Any subclass of Token that actually wants to return a non-null value can
+ /// override this method as appropriate.
+ /// </summary>
+ public virtual object Value
{
get { return null; }
}
@@ -104,8 +104,8 @@ namespace Lucene.Net.QueryParsers.Classic
/// </summary>
public Token(int kind, string image)
{
- this.kind = kind;
- this.image = image;
+ this.Kind = kind;
+ this.Image = image;
}
/// <summary>
@@ -113,7 +113,7 @@ namespace Lucene.Net.QueryParsers.Classic
/// </summary>
public override string ToString()
{
- return image;
+ return Image;
}
/// <summary>
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericRangeQueryNode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericRangeQueryNode.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericRangeQueryNode.cs
index 70ef44c..a6f161c 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericRangeQueryNode.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Nodes/NumericRangeQueryNode.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
/// <seealso cref="AbstractRangeQueryNode{T}"/>
public class NumericRangeQueryNode : AbstractRangeQueryNode<NumericQueryNode>
{
- public NumericConfig numericConfig;
+ private NumericConfig numericConfig; // LUCENENET specific: made private and added a public setter to the property
/// <summary>
/// Constructs a <see cref="NumericRangeQueryNode"/> object using the given
@@ -142,6 +142,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Nodes
public virtual NumericConfig NumericConfig
{
get { return this.numericConfig; }
+ set { this.numericConfig = value; } // LUCENENET specific: made the field private and added setter (confusing)
}
public override string ToString()
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs
index 7c017e6..d3d8817 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/ParseException.cs
@@ -1,7 +1,9 @@
\ufeffusing Lucene.Net.QueryParsers.Flexible.Core;
using Lucene.Net.QueryParsers.Flexible.Core.Messages;
using Lucene.Net.QueryParsers.Flexible.Messages;
+using Lucene.Net.Support;
using System;
+using System.Diagnostics.CodeAnalysis;
using System.Text;
namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
@@ -80,21 +82,40 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// this object has been created due to a parse error, the token
/// following this token will (therefore) be the first error token.
/// </summary>
- public Token currentToken;
+ public Token CurrentToken
+ {
+ get { return currentToken; }
+ set { currentToken = value; }
+ }
+ private Token currentToken;
/// <summary>
/// Each entry in this array is an array of integers. Each array
/// of integers represents a sequence of tokens (by their ordinal
/// values) that is expected at this point of the parse.
/// </summary>
- public int[][] expectedTokenSequences;
+ [WritableArray]
+ [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
+ public int[][] ExpectedTokenSequences
+ {
+ get { return expectedTokenSequences; }
+ set { expectedTokenSequences = value; }
+ }
+ private int[][] expectedTokenSequences;
/// <summary>
/// This is a reference to the "tokenImage" array of the generated
/// parser within which the parse error occurred. This array is
/// defined in the generated ...Constants interface.
/// </summary>
- public string[] tokenImage;
+ [WritableArray]
+ [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
+ public string[] TokenImage
+ {
+ get { return tokenImage; }
+ set { tokenImage = value; }
+ }
+ private string[] tokenImage;
/// <summary>
/// It uses <paramref name="currentToken"/> and <paramref name="expectedTokenSequences"/> to generate a parse
@@ -132,22 +153,22 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
expected.Append(eol).Append(" ");
}
string retval = "Encountered \"";
- Token tok = currentToken.next;
+ Token tok = currentToken.Next;
for (int i = 0; i < maxSize; i++)
{
if (i != 0) retval += " ";
- if (tok.kind == 0)
+ if (tok.Kind == 0)
{
retval += tokenImage[0];
break;
}
- retval += " " + tokenImage[tok.kind];
+ retval += " " + tokenImage[tok.Kind];
retval += " \"";
- retval += Add_Escapes(tok.image);
+ retval += Add_Escapes(tok.Image);
retval += " \"";
- tok = tok.next;
+ tok = tok.Next;
}
- retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
+ retval += "\" at line " + currentToken.Next.BeginLine + ", column " + currentToken.Next.BeginColumn;
retval += "." + eol;
if (expectedTokenSequences.Length == 1)
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
index 5ea39a7..d3514d9 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParser.cs
@@ -366,7 +366,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
Jj_consume_token(-1);
throw new ParseException();
}
- field = EscapeQuerySyntax.DiscardEscapeChar(fieldToken.image).ToString();
+ field = EscapeQuerySyntax.DiscardEscapeChar(fieldToken.Image).ToString();
q = Term(field);
break;
case RegexpToken.OP_LESSTHAN:
@@ -392,7 +392,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
Jj_consume_token(-1);
throw new ParseException();
}
- field = EscapeQuerySyntax.DiscardEscapeChar(fieldToken.image).ToString();
+ field = EscapeQuerySyntax.DiscardEscapeChar(fieldToken.Image).ToString();
switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
{
case RegexpToken.TERM:
@@ -409,20 +409,20 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
Jj_consume_token(-1);
throw new ParseException();
}
- if (term.kind == RegexpToken.QUOTED)
+ if (term.Kind == RegexpToken.QUOTED)
{
- term.image = term.image.Substring(1, (term.image.Length - 1) - 1);
+ term.Image = term.Image.Substring(1, (term.Image.Length - 1) - 1);
}
- switch (@operator.kind)
+ switch (@operator.Kind)
{
case RegexpToken.OP_LESSTHAN:
lowerInclusive = true;
upperInclusive = false;
qLower = new FieldQueryNode(field,
- "*", term.beginColumn, term.endColumn);
+ "*", term.BeginColumn, term.EndColumn);
qUpper = new FieldQueryNode(field,
- EscapeQuerySyntax.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
+ EscapeQuerySyntax.DiscardEscapeChar(term.Image), term.BeginColumn, term.EndColumn);
break;
case RegexpToken.OP_LESSTHANEQ:
@@ -430,27 +430,27 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
upperInclusive = true;
qLower = new FieldQueryNode(field,
- "*", term.beginColumn, term.endColumn);
+ "*", term.BeginColumn, term.EndColumn);
qUpper = new FieldQueryNode(field,
- EscapeQuerySyntax.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
+ EscapeQuerySyntax.DiscardEscapeChar(term.Image), term.BeginColumn, term.EndColumn);
break;
case RegexpToken.OP_MORETHAN:
lowerInclusive = false;
upperInclusive = true;
qLower = new FieldQueryNode(field,
- EscapeQuerySyntax.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
+ EscapeQuerySyntax.DiscardEscapeChar(term.Image), term.BeginColumn, term.EndColumn);
qUpper = new FieldQueryNode(field,
- "*", term.beginColumn, term.endColumn);
+ "*", term.BeginColumn, term.EndColumn);
break;
case RegexpToken.OP_MORETHANEQ:
lowerInclusive = true;
upperInclusive = true;
qLower = new FieldQueryNode(field,
- EscapeQuerySyntax.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
+ EscapeQuerySyntax.DiscardEscapeChar(term.Image), term.BeginColumn, term.EndColumn);
qUpper = new FieldQueryNode(field,
- "*", term.beginColumn, term.endColumn);
+ "*", term.BeginColumn, term.EndColumn);
break;
default:
{ if (true) throw new Exception("Unhandled case: operator=" + @operator.ToString()); }
@@ -490,7 +490,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
Jj_consume_token(-1);
throw new ParseException();
}
- field = EscapeQuerySyntax.DiscardEscapeChar(fieldToken.image).ToString();
+ field = EscapeQuerySyntax.DiscardEscapeChar(fieldToken.Image).ToString();
}
else
{
@@ -539,7 +539,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
float f = (float)1.0;
try
{
- f = Convert.ToSingle(boost.image, CultureInfo.InvariantCulture);
+ f = Convert.ToSingle(boost.Image, CultureInfo.InvariantCulture);
// avoid boosting null queries, such as those caused by stop words
if (q != null)
{
@@ -581,7 +581,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
{
case RegexpToken.TERM:
term = Jj_consume_token(RegexpToken.TERM);
- q = new FieldQueryNode(field, EscapeQuerySyntax.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
+ q = new FieldQueryNode(field, EscapeQuerySyntax.DiscardEscapeChar(term.Image), term.BeginColumn, term.EndColumn);
break;
case RegexpToken.REGEXPTERM:
term = Jj_consume_token(RegexpToken.REGEXPTERM);
@@ -630,7 +630,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
float fms = defaultMinSimilarity;
try
{
- fms = Convert.ToSingle(fuzzySlop.image.Substring(1), CultureInfo.InvariantCulture);
+ fms = Convert.ToSingle(fuzzySlop.Image.Substring(1), CultureInfo.InvariantCulture);
}
#pragma warning disable 168
catch (Exception ignored) { }
@@ -643,11 +643,11 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
{
{ if (true) throw new ParseException(new Message(QueryParserMessages.INVALID_SYNTAX_FUZZY_EDITS)); }
}
- q = new FuzzyQueryNode(field, EscapeQuerySyntax.DiscardEscapeChar(term.image), fms, term.beginColumn, term.endColumn);
+ q = new FuzzyQueryNode(field, EscapeQuerySyntax.DiscardEscapeChar(term.Image), fms, term.BeginColumn, term.EndColumn);
}
else if (regexp)
{
- string re = term.image.Substring(1, (term.image.Length - 1) - 1);
+ string re = term.Image.Substring(1, (term.Image.Length - 1) - 1);
q = new RegexpQueryNode(field, re, 0, re.Length);
}
break;
@@ -726,24 +726,24 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
jj_la1[24] = jj_gen;
break;
}
- if (goop1.kind == RegexpToken.RANGE_QUOTED)
+ if (goop1.Kind == RegexpToken.RANGE_QUOTED)
{
- goop1.image = goop1.image.Substring(1, (goop1.image.Length - 1) - 1);
+ goop1.Image = goop1.Image.Substring(1, (goop1.Image.Length - 1) - 1);
}
- if (goop2.kind == RegexpToken.RANGE_QUOTED)
+ if (goop2.Kind == RegexpToken.RANGE_QUOTED)
{
- goop2.image = goop2.image.Substring(1, (goop2.image.Length - 1) - 1);
+ goop2.Image = goop2.Image.Substring(1, (goop2.Image.Length - 1) - 1);
}
qLower = new FieldQueryNode(field,
- EscapeQuerySyntax.DiscardEscapeChar(goop1.image), goop1.beginColumn, goop1.endColumn);
+ EscapeQuerySyntax.DiscardEscapeChar(goop1.Image), goop1.BeginColumn, goop1.EndColumn);
qUpper = new FieldQueryNode(field,
- EscapeQuerySyntax.DiscardEscapeChar(goop2.image), goop2.beginColumn, goop2.endColumn);
+ EscapeQuerySyntax.DiscardEscapeChar(goop2.Image), goop2.BeginColumn, goop2.EndColumn);
q = new TermRangeQueryNode(qLower, qUpper, startInc ? true : false, endInc ? true : false);
break;
case RegexpToken.QUOTED:
term = Jj_consume_token(RegexpToken.QUOTED);
- q = new QuotedFieldQueryNode(field, EscapeQuerySyntax.DiscardEscapeChar(term.image.Substring(1, (term.image.Length - 1) - 1)), term.beginColumn + 1, term.endColumn - 1);
+ q = new QuotedFieldQueryNode(field, EscapeQuerySyntax.DiscardEscapeChar(term.Image.Substring(1, (term.Image.Length - 1) - 1)), term.BeginColumn + 1, term.EndColumn - 1);
switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
{
case RegexpToken.FUZZY_SLOP:
@@ -769,7 +769,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
{
try
{
- phraseSlop = (int)Convert.ToSingle(fuzzySlop.image.Substring(1), CultureInfo.InvariantCulture);
+ phraseSlop = (int)Convert.ToSingle(fuzzySlop.Image.Substring(1), CultureInfo.InvariantCulture);
q = new SlopQueryNode(q, phraseSlop);
}
#pragma warning disable 168
@@ -792,7 +792,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
float f = (float)1.0;
try
{
- f = Convert.ToSingle(boost.image, CultureInfo.InvariantCulture);
+ f = Convert.ToSingle(boost.Image, CultureInfo.InvariantCulture);
// avoid boosting null queries, such as those caused by stop words
if (q != null)
{
@@ -814,7 +814,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
private bool Jj_2_1(int xla)
{
- jj_la = xla; jj_lastpos = jj_scanpos = token;
+ jj_la = xla; jj_lastpos = jj_scanpos = Token;
try { return !Jj_3_1(); }
#pragma warning disable 168
catch (LookaheadSuccess ls) { return true; }
@@ -824,7 +824,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
private bool Jj_2_2(int xla)
{
- jj_la = xla; jj_lastpos = jj_scanpos = token;
+ jj_la = xla; jj_lastpos = jj_scanpos = Token;
try { return !Jj_3_2(); }
#pragma warning disable 168
catch (LookaheadSuccess ls) { return true; }
@@ -970,11 +970,11 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
}
/// <summary>Generated Token Manager.</summary>
- public StandardSyntaxParserTokenManager token_source;
+ public StandardSyntaxParserTokenManager TokenSource { get; set; }
/// <summary>Current token.</summary>
- public Token token;
+ public Token Token { get; set; }
/// <summary>Next token.</summary>
- public Token jj_nt;
+ public Token Jj_nt { get; set; }
private int jj_ntk;
private Token jj_scanpos, jj_lastpos;
private int jj_la;
@@ -1004,8 +1004,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// </summary>
public StandardSyntaxParser(ICharStream stream)
{
- token_source = new StandardSyntaxParserTokenManager(stream);
- token = new Token();
+ TokenSource = new StandardSyntaxParserTokenManager(stream);
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 28; i++) jj_la1[i] = -1;
@@ -1015,8 +1015,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// <summary>Reinitialize.</summary>
public void ReInit(ICharStream stream)
{
- token_source.ReInit(stream);
- token = new Token();
+ TokenSource.ReInit(stream);
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 28; i++) jj_la1[i] = -1;
@@ -1026,8 +1026,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// <summary>Constructor with generated Token Manager.</summary>
public StandardSyntaxParser(StandardSyntaxParserTokenManager tm)
{
- token_source = tm;
- token = new Token();
+ TokenSource = tm;
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 28; i++) jj_la1[i] = -1;
@@ -1037,8 +1037,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// <summary>Reinitialize.</summary>
public void ReInit(StandardSyntaxParserTokenManager tm)
{
- token_source = tm;
- token = new Token();
+ TokenSource = tm;
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 28; i++) jj_la1[i] = -1;
@@ -1048,10 +1048,10 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
private Token Jj_consume_token(int kind)
{
Token oldToken;
- if ((oldToken = token).next != null) token = token.next;
- else token = token.next = token_source.GetNextToken();
+ if ((oldToken = Token).Next != null) Token = Token.Next;
+ else Token = Token.Next = TokenSource.GetNextToken();
jj_ntk = -1;
- if (token.kind == kind)
+ if (Token.Kind == kind)
{
jj_gen++;
if (++jj_gc > 100)
@@ -1067,9 +1067,9 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
}
}
}
- return token;
+ return Token;
}
- token = oldToken;
+ Token = oldToken;
jj_kind = kind;
throw GenerateParseException();
}
@@ -1081,26 +1081,26 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
if (jj_scanpos == jj_lastpos)
{
jj_la--;
- if (jj_scanpos.next == null)
+ if (jj_scanpos.Next == null)
{
- jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
+ jj_lastpos = jj_scanpos = jj_scanpos.Next = TokenSource.GetNextToken();
}
else
{
- jj_lastpos = jj_scanpos = jj_scanpos.next;
+ jj_lastpos = jj_scanpos = jj_scanpos.Next;
}
}
else
{
- jj_scanpos = jj_scanpos.next;
+ jj_scanpos = jj_scanpos.Next;
}
if (jj_rescan)
{
- int i = 0; Token tok = token;
- while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
+ int i = 0; Token tok = Token;
+ while (tok != null && tok != jj_scanpos) { i++; tok = tok.Next; }
if (tok != null) Jj_add_error_token(kind, i);
}
- if (jj_scanpos.kind != kind) return true;
+ if (jj_scanpos.Kind != kind) return true;
if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
return false;
}
@@ -1109,31 +1109,31 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// <summary>Get the next Token.</summary>
public Token GetNextToken()
{
- if (token.next != null) token = token.next;
- else token = token.next = token_source.GetNextToken();
+ if (Token.Next != null) Token = Token.Next;
+ else Token = Token.Next = TokenSource.GetNextToken();
jj_ntk = -1;
jj_gen++;
- return token;
+ return Token;
}
/// <summary>Get the specific Token.</summary>
public Token GetToken(int index)
{
- Token t = token;
+ Token t = Token;
for (int i = 0; i < index; i++)
{
- if (t.next != null) t = t.next;
- else t = t.next = token_source.GetNextToken();
+ if (t.Next != null) t = t.Next;
+ else t = t.Next = TokenSource.GetNextToken();
}
return t;
}
private int Jj_ntk()
{
- if ((jj_nt = token.next) == null)
- return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
+ if ((Jj_nt = Token.Next) == null)
+ return (jj_ntk = (Token.Next = TokenSource.GetNextToken()).Kind);
else
- return (jj_ntk = jj_nt.kind);
+ return (jj_ntk = Jj_nt.Kind);
}
private List<int[]> jj_expentries = new List<int[]>();
@@ -1222,7 +1222,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
{
exptokseq[i] = jj_expentries[i];
}
- return new ParseException(token, exptokseq, StandardSyntaxParserConstants.TokenImage);
+ return new ParseException(Token, exptokseq, StandardSyntaxParserConstants.TokenImage);
}
/// <summary>Enable tracing.</summary>
@@ -1272,7 +1272,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
if (p.next == null) { p = p.next = new JJCalls(); break; }
p = p.next;
}
- p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
+ p.gen = jj_gen + xla - jj_la; p.first = Token; p.arg = xla;
}
internal sealed class JJCalls
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs
index 9723a47..e04feef 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/StandardSyntaxParserTokenManager.cs
@@ -1,4 +1,5 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
using System.IO;
namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
@@ -26,9 +27,9 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
public class StandardSyntaxParserTokenManager /*: StandardSyntaxParserConstants*/
{
/// <summary>Debug output.</summary>
- public TextWriter debugStream = Console.Out;
+ private TextWriter debugStream = Console.Out; // LUCENENET specific - made private, since we already have a setter
/// <summary>Set debug output.</summary>
- public void SetDebugStream(TextWriter ds) { debugStream = ds; }
+ public void SetDebugStream(TextWriter ds) { debugStream = new SafeTextWriterWrapper(ds); }
private int JjStopStringLiteralDfa_2(int pos, long active0)
{
switch (pos)
@@ -853,10 +854,10 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
endColumn = m_input_stream.EndColumn;
t = Token.NewToken(jjmatchedKind, curTokenImage);
- t.beginLine = beginLine;
- t.endLine = endLine;
- t.beginColumn = beginColumn;
- t.endColumn = endColumn;
+ t.BeginLine = beginLine;
+ t.EndLine = endLine;
+ t.BeginColumn = beginColumn;
+ t.EndColumn = endColumn;
return t;
}
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/Token.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/Token.cs b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/Token.cs
index 3010278..3d3bffa 100644
--- a/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/Token.cs
+++ b/src/Lucene.Net.QueryParser/Flexible/Standard/Parser/Token.cs
@@ -32,21 +32,21 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// system is determined by JavaCCParser, and a table of these numbers is
/// stored in the file ...Constants.java.
/// </summary>
- public int kind;
+ public int Kind { get; set; }
/// <summary>The line number of the first character of this Token.</summary>
- public int beginLine;
+ public int BeginLine { get; set; }
/// <summary>The column number of the first character of this Token.</summary>
- public int beginColumn;
+ public int BeginColumn { get; set; }
/// <summary>The line number of the last character of this Token.</summary>
- public int endLine;
+ public int EndLine { get; set; }
/// <summary>The column number of the last character of this Token.</summary>
- public int endColumn;
+ public int EndColumn { get; set; }
/// <summary>
/// The string image of the token.
/// </summary>
- public string image;
+ public string Image { get; set; }
/// <summary>
/// A reference to the next regular (non-special) token from the input
@@ -56,7 +56,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// token. Otherwise, see below for a description of the contents of
/// this field.
/// </summary>
- public Token next;
+ public Token Next { get; set; }
/// <summary>
/// This field is used to access special tokens that occur prior to this
@@ -70,7 +70,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// immediately follow it (without an intervening regular token). If there
/// is no such token, this field is null.
/// </summary>
- public Token specialToken;
+ public Token SpecialToken { get; set; }
/// <summary>
/// An optional attribute value of the Token.
@@ -103,8 +103,8 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// </summary>
public Token(int kind, string image)
{
- this.kind = kind;
- this.image = image;
+ this.Kind = kind;
+ this.Image = image;
}
/// <summary>
@@ -112,7 +112,7 @@ namespace Lucene.Net.QueryParsers.Flexible.Standard.Parser
/// </summary>
public override string ToString()
{
- return image;
+ return Image;
}
/// <summary>
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs b/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
index 22092ec..c2bdf27 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/ParseException.cs
@@ -1,4 +1,6 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
+using System.Diagnostics.CodeAnalysis;
using System.Text;
namespace Lucene.Net.QueryParsers.Surround.Parser
@@ -77,21 +79,40 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
/// this object has been created due to a parse error, the token
/// following this token will (therefore) be the first error token.
/// </summary>
- public Token currentToken;
+ public Token CurrentToken
+ {
+ get { return currentToken; }
+ set { currentToken = value; }
+ }
+ private Token currentToken;
/// <summary>
/// Each entry in this array is an array of integers. Each array
/// of integers represents a sequence of tokens (by their ordinal
/// values) that is expected at this point of the parse.
/// </summary>
- public int[][] expectedTokenSequences;
+ [WritableArray]
+ [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
+ public int[][] ExpectedTokenSequences
+ {
+ get { return expectedTokenSequences; }
+ set { expectedTokenSequences = value; }
+ }
+ private int[][] expectedTokenSequences;
/// <summary>
/// This is a reference to the "tokenImage" array of the generated
/// parser within which the parse error occurred. This array is
/// defined in the generated ...Constants interface.
/// </summary>
- public string[] tokenImage;
+ [WritableArray]
+ [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
+ public string[] TokenImage
+ {
+ get { return tokenImage; }
+ set { tokenImage = value; }
+ }
+ private string[] tokenImage;
/// <summary>
@@ -125,23 +146,23 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
expected.Append(eol).Append(" ");
}
string retval = "Encountered \"";
- Token tok = currentToken.next;
+ Token tok = currentToken.Next;
for (int i = 0; i < maxSize; i++)
{
if (i != 0)
retval += " ";
- if (tok.kind == 0)
+ if (tok.Kind == 0)
{
retval += tokenImage[0];
break;
}
- retval += (" " + tokenImage[tok.kind]);
+ retval += (" " + tokenImage[tok.Kind]);
retval += " \"";
- retval += Add_escapes(tok.image);
+ retval += Add_escapes(tok.Image);
retval += " \"";
- tok = tok.next;
+ tok = tok.Next;
}
- retval += ("\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn);
+ retval += ("\" at line " + currentToken.Next.BeginLine + ", column " + currentToken.Next.BeginColumn);
retval += ("." + eol);
if (expectedTokenSequences.Length == 1)
{
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
index b1c2fe1..5a9d0c1 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParser.cs
@@ -101,17 +101,17 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
protected virtual SrndQuery GetOrQuery(IEnumerable<SrndQuery> queries, bool infix, Token orToken)
{
- return new OrQuery(queries, infix, orToken.image);
+ return new OrQuery(queries, infix, orToken.Image);
}
protected virtual SrndQuery GetAndQuery(IEnumerable<SrndQuery> queries, bool infix, Token andToken)
{
- return new AndQuery(queries, infix, andToken.image);
+ return new AndQuery(queries, infix, andToken.Image);
}
protected virtual SrndQuery GetNotQuery(IEnumerable<SrndQuery> queries, Token notToken)
{
- return new NotQuery(queries, notToken.image);
+ return new NotQuery(queries, notToken.Image);
}
protected static int GetOpDistance(string distanceOp)
@@ -139,10 +139,10 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
{
DistanceQuery dq = new DistanceQuery(queries,
infix,
- GetOpDistance(dToken.image),
- dToken.image,
+ GetOpDistance(dToken.Image),
+ dToken.Image,
ordered);
- CheckDistanceSubQueries(dq, dToken.image);
+ CheckDistanceSubQueries(dq, dToken.Image);
return dq;
}
@@ -224,7 +224,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
{
fieldNames = new List<string>();
}
- fieldNames.Add(fieldName.image);
+ fieldNames.Add(fieldName.Image);
}
label_1:
{ if (true) return fieldNames; }
@@ -500,38 +500,38 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
{
case RegexpToken.TERM:
term = Jj_consume_token(RegexpToken.TERM);
- { if (true) return GetTermQuery(term.image, false /* not quoted */); }
+ { if (true) return GetTermQuery(term.Image, false /* not quoted */); }
//break; // unreachable
case RegexpToken.QUOTED:
term = Jj_consume_token(RegexpToken.QUOTED);
- { if (true) return GetTermQuery(term.image.Substring(1, (term.image.Length - 1) - 1), true /* quoted */); }
+ { if (true) return GetTermQuery(term.Image.Substring(1, (term.Image.Length - 1) - 1), true /* quoted */); }
//break; // unreachable
case RegexpToken.SUFFIXTERM:
term = Jj_consume_token(RegexpToken.SUFFIXTERM);
/* ending in * */
- if (!AllowedSuffix(term.image))
+ if (!AllowedSuffix(term.Image))
{
- { if (true) throw new ParseException(truncationErrorMessage + term.image); }
+ { if (true) throw new ParseException(truncationErrorMessage + term.Image); }
}
- { if (true) return GetPrefixQuery(term.image.Substring(0, term.image.Length - 1), false /* not quoted */); }
+ { if (true) return GetPrefixQuery(term.Image.Substring(0, term.Image.Length - 1), false /* not quoted */); }
//break; // unreachable
case RegexpToken.TRUNCTERM:
term = Jj_consume_token(RegexpToken.TRUNCTERM);
/* with at least one * or ? */
- if (!AllowedTruncation(term.image))
+ if (!AllowedTruncation(term.Image))
{
- { if (true) throw new ParseException(truncationErrorMessage + term.image); }
+ { if (true) throw new ParseException(truncationErrorMessage + term.Image); }
}
- { if (true) return GetTruncQuery(term.image); }
+ { if (true) return GetTruncQuery(term.Image); }
//break; // unreachable
case RegexpToken.TRUNCQUOTED:
term = Jj_consume_token(RegexpToken.TRUNCQUOTED);
/* eg. "9b-b,m"* */
- if ((term.image.Length - 3) < minimumPrefixLength)
+ if ((term.Image.Length - 3) < minimumPrefixLength)
{
- { if (true) throw new ParseException(truncationErrorMessage + term.image); }
+ { if (true) throw new ParseException(truncationErrorMessage + term.Image); }
}
- { if (true) return GetPrefixQuery(term.image.Substring(1, (term.image.Length - 2) - 1), true /* quoted */); }
+ { if (true) return GetPrefixQuery(term.Image.Substring(1, (term.Image.Length - 2) - 1), true /* quoted */); }
//break; // unreachable
default:
jj_la1[8] = jj_gen;
@@ -561,15 +561,15 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
try
{
// LUCENENET TODO: Test parsing float in various cultures (.NET)
- f = float.Parse(weight.image);
+ f = float.Parse(weight.Image);
}
catch (Exception floatExc)
{
- { if (true) throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")"); }
+ { if (true) throw new ParseException(boostErrorMessage + weight.Image + " (" + floatExc + ")"); }
}
if (f <= 0.0)
{
- { if (true) throw new ParseException(boostErrorMessage + weight.image); }
+ { if (true) throw new ParseException(boostErrorMessage + weight.Image); }
}
q.Weight = (f * q.Weight); /* left associative, fwiw */
}
@@ -578,7 +578,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
private bool Jj_2_1(int xla)
{
- jj_la = xla; jj_lastpos = jj_scanpos = token;
+ jj_la = xla; jj_lastpos = jj_scanpos = Token;
try { return !Jj_3_1(); }
catch (LookaheadSuccess) { return true; }
finally { Jj_save(0, xla); }
@@ -592,11 +592,11 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
}
/// <summary>Generated Token Manager.</summary>
- public QueryParserTokenManager token_source;
+ public QueryParserTokenManager TokenSource { get; set; }
/// <summary>Current token.</summary>
- public Token token;
+ public Token Token { get; set; }
/// <summary>Next token.</summary>
- public Token jj_nt;
+ public Token Jj_nt { get; set; }
private int jj_ntk;
private Token jj_scanpos, jj_lastpos;
private int jj_la;
@@ -619,8 +619,8 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
/// <summary>Constructor with user supplied <see cref="ICharStream"/>.</summary>
public QueryParser(ICharStream stream)
{
- token_source = new QueryParserTokenManager(stream);
- token = new Token();
+ TokenSource = new QueryParserTokenManager(stream);
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 10; i++) jj_la1[i] = -1;
@@ -630,8 +630,8 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
/// <summary>Reinitialize.</summary>
public virtual void ReInit(ICharStream stream)
{
- token_source.ReInit(stream);
- token = new Token();
+ TokenSource.ReInit(stream);
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 10; i++) jj_la1[i] = -1;
@@ -641,8 +641,8 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
/// <summary>Constructor with generated Token Manager.</summary>
public QueryParser(QueryParserTokenManager tm)
{
- token_source = tm;
- token = new Token();
+ TokenSource = tm;
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 10; i++) jj_la1[i] = -1;
@@ -652,8 +652,8 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
/// <summary>Reinitialize.</summary>
public virtual void ReInit(QueryParserTokenManager tm)
{
- token_source = tm;
- token = new Token();
+ TokenSource = tm;
+ Token = new Token();
jj_ntk = -1;
jj_gen = 0;
for (int i = 0; i < 10; i++) jj_la1[i] = -1;
@@ -663,10 +663,10 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
private Token Jj_consume_token(int kind)
{
Token oldToken;
- if ((oldToken = token).next != null) token = token.next;
- else token = token.next = token_source.GetNextToken();
+ if ((oldToken = Token).Next != null) Token = Token.Next;
+ else Token = Token.Next = TokenSource.GetNextToken();
jj_ntk = -1;
- if (token.kind == kind)
+ if (Token.Kind == kind)
{
jj_gen++;
if (++jj_gc > 100)
@@ -682,9 +682,9 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
}
}
}
- return token;
+ return Token;
}
- token = oldToken;
+ Token = oldToken;
jj_kind = kind;
throw GenerateParseException();
}
@@ -697,26 +697,26 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
if (jj_scanpos == jj_lastpos)
{
jj_la--;
- if (jj_scanpos.next == null)
+ if (jj_scanpos.Next == null)
{
- jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
+ jj_lastpos = jj_scanpos = jj_scanpos.Next = TokenSource.GetNextToken();
}
else
{
- jj_lastpos = jj_scanpos = jj_scanpos.next;
+ jj_lastpos = jj_scanpos = jj_scanpos.Next;
}
}
else
{
- jj_scanpos = jj_scanpos.next;
+ jj_scanpos = jj_scanpos.Next;
}
if (jj_rescan)
{
- int i = 0; Token tok = token;
- while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
+ int i = 0; Token tok = Token;
+ while (tok != null && tok != jj_scanpos) { i++; tok = tok.Next; }
if (tok != null) Jj_add_error_token(kind, i);
}
- if (jj_scanpos.kind != kind) return true;
+ if (jj_scanpos.Kind != kind) return true;
if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
return false;
}
@@ -724,31 +724,31 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
/// <summary>Get the next Token.</summary>
public Token GetNextToken()
{
- if (token.next != null) token = token.next;
- else token = token.next = token_source.GetNextToken();
+ if (Token.Next != null) Token = Token.Next;
+ else Token = Token.Next = TokenSource.GetNextToken();
jj_ntk = -1;
jj_gen++;
- return token;
+ return Token;
}
/// <summary>Get the specific Token.</summary>
public Token GetToken(int index)
{
- Token t = token;
+ Token t = Token;
for (int i = 0; i < index; i++)
{
- if (t.next != null) t = t.next;
- else t = t.next = token_source.GetNextToken();
+ if (t.Next != null) t = t.Next;
+ else t = t.Next = TokenSource.GetNextToken();
}
return t;
}
private int Jj_ntk()
{
- if ((jj_nt = token.next) == null)
- return (jj_ntk = (token.next = token_source.GetNextToken()).kind);
+ if ((Jj_nt = Token.Next) == null)
+ return (jj_ntk = (Token.Next = TokenSource.GetNextToken()).Kind);
else
- return (jj_ntk = jj_nt.kind);
+ return (jj_ntk = Jj_nt.Kind);
}
private IList<int[]> jj_expentries = new List<int[]>();
@@ -832,7 +832,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
{
exptokseq[i] = jj_expentries[i];
}
- return new ParseException(token, exptokseq, QueryParserConstants.TokenImage);
+ return new ParseException(Token, exptokseq, QueryParserConstants.TokenImage);
}
/// <summary>Enable tracing. </summary>
@@ -879,7 +879,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
if (p.next == null) { p = p.next = new JJCalls(); break; }
p = p.next;
}
- p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
+ p.gen = jj_gen + xla - jj_la; p.first = Token; p.arg = xla;
}
internal sealed class JJCalls
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
index 58fcbbf..f477104 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/QueryParserTokenManager.cs
@@ -1,4 +1,5 @@
-\ufeffusing System;
+\ufeffusing Lucene.Net.Support;
+using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
@@ -27,11 +28,11 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
public class QueryParserTokenManager //: QueryParserConstants
{
/// <summary>Debug output. </summary>
- public TextWriter debugStream;
+ private TextWriter debugStream; // LUCENENET specific - made private, since we already have a setter
/// <summary>Set debug output. </summary>
public virtual void SetDebugStream(TextWriter ds)
{
- debugStream = ds;
+ debugStream = new SafeTextWriterWrapper(ds);
}
private int JjStopStringLiteralDfa_1(int pos, long active0)
{
@@ -626,10 +627,10 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
endColumn = m_input_stream.EndColumn;
t = Token.NewToken(jjmatchedKind, curTokenImage);
- t.beginLine = beginLine;
- t.endLine = endLine;
- t.beginColumn = beginColumn;
- t.endColumn = endColumn;
+ t.BeginLine = beginLine;
+ t.EndLine = endLine;
+ t.BeginColumn = beginColumn;
+ t.EndColumn = endColumn;
return t;
}
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs b/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
index 7813dd7..32f95da 100644
--- a/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
+++ b/src/Lucene.Net.QueryParser/Surround/Parser/Token.cs
@@ -27,59 +27,59 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
#endif
public class Token
{
-
- /// <summary>
+
+ /// <summary>
/// An integer that describes the kind of this token. This numbering
- /// system is determined by JavaCCParser, and a table of these numbers is
- /// stored in the file ...Constants.java.
- /// </summary>
- public int kind;
+ /// system is determined by JavaCCParser, and a table of these numbers is
+ /// stored in the file ...Constants.java.
+ /// </summary>
+ public int Kind { get; set; }
/// <summary>The line number of the first character of this Token. </summary>
- public int beginLine;
- /// <summary>The column number of the first character of this Token. </summary>
- public int beginColumn;
- /// <summary>The line number of the last character of this Token. </summary>
- public int endLine;
- /// <summary>The column number of the last character of this Token. </summary>
- public int endColumn;
-
- /// <summary>The string image of the token.</summary>
- public string image;
-
- /// <summary>
+ public int BeginLine { get; set; }
+ /// <summary>The column number of the first character of this Token. </summary>
+ public int BeginColumn { get; set; }
+ /// <summary>The line number of the last character of this Token. </summary>
+ public int EndLine { get; set; }
+ /// <summary>The column number of the last character of this Token. </summary>
+ public int EndColumn { get; set; }
+
+ /// <summary>The string image of the token.</summary>
+ public string Image { get; set; }
+
+ /// <summary>
/// A reference to the next regular (non-special) token from the input
- /// stream. If this is the last token from the input stream, or if the
- /// token manager has not read tokens beyond this one, this field is
- /// set to null. This is true only if this token is also a regular
- /// token. Otherwise, see below for a description of the contents of
- /// this field.
- /// </summary>
- public Token next;
-
- /// <summary>
+ /// stream. If this is the last token from the input stream, or if the
+ /// token manager has not read tokens beyond this one, this field is
+ /// set to null. This is true only if this token is also a regular
+ /// token. Otherwise, see below for a description of the contents of
+ /// this field.
+ /// </summary>
+ public Token Next { get; set; }
+
+ /// <summary>
/// This field is used to access special tokens that occur prior to this
- /// token, but after the immediately preceding regular (non-special) token.
- /// If there are no such special tokens, this field is set to null.
- /// When there are more than one such special token, this field refers
- /// to the last of these special tokens, which in turn refers to the next
- /// previous special token through its specialToken field, and so on
- /// until the first special token (whose specialToken field is null).
- /// The next fields of special tokens refer to other special tokens that
- /// immediately follow it (without an intervening regular token). If there
- /// is no such token, this field is null.
- /// </summary>
- public Token specialToken;
+ /// token, but after the immediately preceding regular (non-special) token.
+ /// If there are no such special tokens, this field is set to null.
+ /// When there are more than one such special token, this field refers
+ /// to the last of these special tokens, which in turn refers to the next
+ /// previous special token through its specialToken field, and so on
+ /// until the first special token (whose specialToken field is null).
+ /// The next fields of special tokens refer to other special tokens that
+ /// immediately follow it (without an intervening regular token). If there
+ /// is no such token, this field is null.
+ /// </summary>
+ public Token SpecialToken { get; set; }
- /// <summary>
+ /// <summary>
/// An optional attribute value of the Token.
- /// Tokens which are not used as syntactic sugar will often contain
- /// meaningful values that will be used later on by the compiler or
- /// interpreter. This attribute value is often different from the image.
- /// Any subclass of Token that actually wants to return a non-null value can
- /// override this method as appropriate.
- /// </summary>
- public virtual object Value
+ /// Tokens which are not used as syntactic sugar will often contain
+ /// meaningful values that will be used later on by the compiler or
+ /// interpreter. This attribute value is often different from the image.
+ /// Any subclass of Token that actually wants to return a non-null value can
+ /// override this method as appropriate.
+ /// </summary>
+ public virtual object Value
{
get { return null; }
}
@@ -104,8 +104,8 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
/// </summary>
public Token(int kind, string image)
{
- this.kind = kind;
- this.image = image;
+ this.Kind = kind;
+ this.Image = image;
}
/// <summary>
@@ -113,7 +113,7 @@ namespace Lucene.Net.QueryParsers.Surround.Parser
/// </summary>
public override string ToString()
{
- return image;
+ return Image;
}
/// <summary>
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/13db3e69/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
index 34b2f47..ab63341 100644
--- a/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
+++ b/src/Lucene.Net.Tests.QueryParser/Classic/TestQueryParser.cs
@@ -170,12 +170,12 @@ namespace Lucene.Net.QueryParsers.Classic
internal override Query HandleBareFuzzy(string qfield, Token fuzzySlop, string termImage)
{
- if (fuzzySlop.image.EndsWith("\u20ac"))
+ if (fuzzySlop.Image.EndsWith("\u20ac"))
{
float fms = FuzzyMinSim;
try
{
- fms = float.Parse(fuzzySlop.image.Substring(1, fuzzySlop.image.Length - 2));
+ fms = float.Parse(fuzzySlop.Image.Substring(1, fuzzySlop.Image.Length - 2));
}
catch (Exception /*ignored*/) { }
float value = float.Parse(termImage);