You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ar...@apache.org on 2008/07/15 23:44:10 UTC

svn commit: r677059 [12/19] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/...

Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestQueryParser.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs Tue Jul 15 14:44:04 2008
@@ -19,72 +19,66 @@
 
 using NUnit.Framework;
 
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Token = Lucene.Net.Analysis.Token;
-using TokenFilter = Lucene.Net.Analysis.TokenFilter;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using DateField = Lucene.Net.Documents.DateField;
 using DateTools = Lucene.Net.Documents.DateTools;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Lucene.Net.Analysis;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Lucene.Net.Search;
 using Searchable = Lucene.Net.Search.Searchable;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using ParseException = Lucene.Net.QueryParsers.ParseException;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
-namespace Lucene.Net.QueryParser
+namespace Lucene.Net.QueryParsers
 {
 	
 	/// <summary> Tests QueryParser.</summary>
 	[TestFixture]
-    public class TestQueryParser
+	public class TestQueryParser : LuceneTestCase
 	{
-        public class AnonymousClassQueryParser : Lucene.Net.QueryParsers.QueryParser
-        {
-            private void  InitBlock(int[] type, TestQueryParser enclosingInstance)
-            {
-                this.type = type;
-                this.enclosingInstance = enclosingInstance;
-            }
-            private int[] type;
-            private TestQueryParser enclosingInstance;
-            public TestQueryParser Enclosing_Instance
-            {
-                get
-                {
-                    return enclosingInstance;
-                }
+		public class AnonymousClassQueryParser : Lucene.Net.QueryParsers.QueryParser
+		{
+			private void  InitBlock(int[] type, TestQueryParser enclosingInstance)
+			{
+				this.type = type;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private int[] type;
+			private TestQueryParser enclosingInstance;
+			public TestQueryParser Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
 				
-            }
-            internal AnonymousClassQueryParser(int[] type, TestQueryParser enclosingInstance, System.String Param1, Lucene.Net.Analysis.Analyzer Param2):base(Param1, Param2)
-            {
-                InitBlock(type, enclosingInstance);
-            }
-            public override Lucene.Net.Search.Query GetWildcardQuery(System.String field, System.String termStr)
-            {
-                // override error checking of superclass
-                type[0] = 1;
-                return new TermQuery(new Term(field, termStr));
-            }
-            public override Lucene.Net.Search.Query GetPrefixQuery(System.String field, System.String termStr)
-            {
-                // override error checking of superclass
-                type[0] = 2;
-                return new TermQuery(new Term(field, termStr));
-            }
-			
-            public override Lucene.Net.Search.Query GetFieldQuery(System.String field, System.String queryText)
-            {
-                type[0] = 3;
-                return base.GetFieldQuery(field, queryText);
-            }
-        }
+			}
+			internal AnonymousClassQueryParser(int[] type, TestQueryParser enclosingInstance, System.String Param1, Lucene.Net.Analysis.Analyzer Param2):base(Param1, Param2)
+			{
+				InitBlock(type, enclosingInstance);
+			}
+			public override Lucene.Net.Search.Query GetWildcardQuery(System.String field, System.String termStr)
+			{
+				// override error checking of superclass
+				type[0] = 1;
+				return new TermQuery(new Term(field, termStr));
+			}
+			public override Lucene.Net.Search.Query GetPrefixQuery(System.String field, System.String termStr)
+			{
+				// override error checking of superclass
+				type[0] = 2;
+				return new TermQuery(new Term(field, termStr));
+			}
+			
+			public override Lucene.Net.Search.Query GetFieldQuery(System.String field, System.String queryText)
+			{
+				type[0] = 3;
+				return base.GetFieldQuery(field, queryText);
+			}
+		}
 		
 		public static Analyzer qpAnalyzer = new QPTestAnalyzer();
 		
@@ -100,22 +94,22 @@
 			internal bool inPhrase = false;
 			internal int savedStart = 0, savedEnd = 0;
 			
-			public override Token Next()
+			public override Lucene.Net.Analysis.Token Next()
 			{
 				if (inPhrase)
 				{
 					inPhrase = false;
-					return new Token("phrase2", savedStart, savedEnd);
+					return new Lucene.Net.Analysis.Token("phrase2", savedStart, savedEnd);
 				}
 				else
-					for (Token token = input.Next(); token != null; token = input.Next())
+					for (Lucene.Net.Analysis.Token token = input.Next(); token != null; token = input.Next())
 					{
 						if (token.TermText().Equals("phrase"))
 						{
 							inPhrase = true;
 							savedStart = token.StartOffset();
 							savedEnd = token.EndOffset();
-							return new Token("phrase1", savedStart, savedEnd);
+							return new Lucene.Net.Analysis.Token("phrase1", savedStart, savedEnd);
 						}
 						else if (!token.TermText().Equals("stop"))
 							return token;
@@ -154,8 +148,9 @@
 		private int originalMaxClauses;
 		
 		[SetUp]
-        public virtual void  SetUp()
+		public override void SetUp()
 		{
+			base.SetUp();
 			originalMaxClauses = BooleanQuery.GetMaxClauseCount();
 		}
 		
@@ -183,31 +178,31 @@
 			}
 		}
 		
-        public virtual void  AssertQueryEquals(Lucene.Net.QueryParsers.QueryParser qp, System.String field, System.String query, System.String result)
-        {
-            Query q = qp.Parse(query);
-            System.String s = q.ToString(field);
-            if (!s.Equals(result))
-            {
-                Assert.Fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result + "/");
-            }
-        }
-		
-        public virtual void  AssertEscapedQueryEquals(System.String query, Analyzer a, System.String result)
-        {
-            System.String escapedQuery = Lucene.Net.QueryParsers.QueryParser.Escape(query);
-            if (!escapedQuery.Equals(result))
-            {
-                Assert.Fail("Query /" + query + "/ yielded /" + escapedQuery + "/, expecting /" + result + "/");
-            }
-        }
+		public virtual void  AssertQueryEquals(Lucene.Net.QueryParsers.QueryParser qp, System.String field, System.String query, System.String result)
+		{
+			Query q = qp.Parse(query);
+			System.String s = q.ToString(field);
+			if (!s.Equals(result))
+			{
+				Assert.Fail("Query /" + query + "/ yielded /" + s + "/, expecting /" + result + "/");
+			}
+		}
+		
+		public virtual void  AssertEscapedQueryEquals(System.String query, Analyzer a, System.String result)
+		{
+			System.String escapedQuery = Lucene.Net.QueryParsers.QueryParser.Escape(query);
+			if (!escapedQuery.Equals(result))
+			{
+				Assert.Fail("Query /" + query + "/ yielded /" + escapedQuery + "/, expecting /" + result + "/");
+			}
+		}
 		
-        public virtual void  AssertWildcardQueryEquals(System.String query, bool lowercase, System.String result, bool allowLeadingWildcard)
+		public virtual void  AssertWildcardQueryEquals(System.String query, bool lowercase, System.String result, bool allowLeadingWildcard)
 		{
 			Lucene.Net.QueryParsers.QueryParser qp = GetParser(null);
 			qp.SetLowercaseExpandedTerms(lowercase);
-            qp.SetAllowLeadingWildcard(allowLeadingWildcard);
-            Query q = qp.Parse(query);
+			qp.SetAllowLeadingWildcard(allowLeadingWildcard);
+			Query q = qp.Parse(query);
 			System.String s = q.ToString("field");
 			if (!s.Equals(result))
 			{
@@ -215,12 +210,12 @@
 			}
 		}
 		
-        public virtual void  AssertWildcardQueryEquals(System.String query, bool lowercase, System.String result)
-        {
-            AssertWildcardQueryEquals(query, lowercase, result, false);
-        }
+		public virtual void  AssertWildcardQueryEquals(System.String query, bool lowercase, System.String result)
+		{
+			AssertWildcardQueryEquals(query, lowercase, result, false);
+		}
 		
-        public virtual void  AssertWildcardQueryEquals(System.String query, System.String result)
+		public virtual void  AssertWildcardQueryEquals(System.String query, System.String result)
 		{
 			Lucene.Net.QueryParsers.QueryParser qp = GetParser(null);
 			Query q = qp.Parse(query);
@@ -251,11 +246,14 @@
 		}
 		
 		[Test]
-        public virtual void  TestSimple()
+		public virtual void  TestSimple()
 		{
 			AssertQueryEquals("term term term", null, "term term term");
-			AssertQueryEquals("türm term term", null, "türm term term");
-			AssertQueryEquals("ümlaut", null, "ümlaut");
+			AssertQueryEquals("türm term term", new WhitespaceAnalyzer(), "türm term term");
+			AssertQueryEquals("ümlaut", new WhitespaceAnalyzer(), "ümlaut");
+			
+			AssertQueryEquals("\"\"", new KeywordAnalyzer(), "");
+			AssertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
 			
 			AssertQueryEquals("a AND b", null, "+a +b");
 			AssertQueryEquals("(a AND b)", null, "+a +b");
@@ -303,7 +301,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestPunct()
+		public virtual void  TestPunct()
 		{
 			Analyzer a = new WhitespaceAnalyzer();
 			AssertQueryEquals("a&b", a, "a&b");
@@ -312,7 +310,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestSlop()
+		public virtual void  TestSlop()
 		{
 			AssertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
 			AssertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork");
@@ -322,7 +320,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestNumber()
+		public virtual void  TestNumber()
 		{
 			// The numbers go away because SimpleAnalyzer ignores them
 			AssertQueryEquals("3", null, "");
@@ -336,7 +334,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestWildcard()
+		public virtual void  TestWildcard()
 		{
 			AssertQueryEquals("term*", null, "term*");
 			AssertQueryEquals("term*^2", null, "term*^2.0");
@@ -357,15 +355,9 @@
 			fq = (FuzzyQuery) GetQuery("term~", null);
 			Assert.AreEqual(0.5f, fq.GetMinSimilarity(), 0.1f);
 			Assert.AreEqual(FuzzyQuery.defaultPrefixLength, fq.GetPrefixLength());
-			try
-			{
-				GetQuery("term~1.1", null); // value > 1, throws exception
-				Assert.Fail();
-			}
-			catch (ParseException pe)
-			{
-				// expected exception
-			}
+			
+			AssertParseException("term~1.1"); // value > 1, throws exception
+			
 			Assert.IsTrue(GetQuery("term*germ", null) is WildcardQuery);
 			
 			/* Tests to see that wild card terms are (or are not) properly
@@ -403,172 +395,197 @@
 			AssertWildcardQueryEquals("[A TO C]", "[a TO c]");
 			AssertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
 			AssertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
-            // Test suffix queries: first disallow
-            try
-            {
-                AssertWildcardQueryEquals("*Term", true, "*term");
-                Assert.Fail();
-            }
-            catch (ParseException pe)
-            {
-                // expected exception
-            }
-            try
-            {
-                AssertWildcardQueryEquals("?Term", true, "?term");
-                Assert.Fail();
-            }
-            catch (ParseException pe)
-            {
-                // expected exception
-            }
-            // Test suffix queries: then allow
-            AssertWildcardQueryEquals("*Term", true, "*term", true);
-            AssertWildcardQueryEquals("?Term", true, "?term", true);
-        }
+			// Test suffix queries: first disallow
+			try
+			{
+				AssertWildcardQueryEquals("*Term", true, "*term");
+				Assert.Fail();
+			}
+			catch (ParseException)
+			{
+				// expected exception
+			}
+			try
+			{
+				AssertWildcardQueryEquals("?Term", true, "?term");
+				Assert.Fail();
+			}
+			catch (ParseException)
+			{
+				// expected exception
+			}
+			// Test suffix queries: then allow
+			AssertWildcardQueryEquals("*Term", true, "*term", true);
+			AssertWildcardQueryEquals("?Term", true, "?term", true);
+		}
+		
+		[Test]
+		public virtual void  TestLeadingWildcardType()
+		{
+			Lucene.Net.QueryParsers.QueryParser qp = GetParser(null);
+			qp.SetAllowLeadingWildcard(true);
+			Assert.AreEqual(typeof(WildcardQuery), qp.Parse("t*erm*").GetType());
+			Assert.AreEqual(typeof(WildcardQuery), qp.Parse("?term*").GetType());
+			Assert.AreEqual(typeof(WildcardQuery), qp.Parse("*term*").GetType());
+		}
 		
 		[Test]
-        public virtual void  TestQPA()
+		public virtual void  TestQPA()
 		{
+			AssertQueryEquals("term term^3.0 term", qpAnalyzer, "term term^3.0 term");
+			AssertQueryEquals("term stop^3.0 term", qpAnalyzer, "term term");
+			
 			AssertQueryEquals("term term term", qpAnalyzer, "term term term");
 			AssertQueryEquals("term +stop term", qpAnalyzer, "term term");
 			AssertQueryEquals("term -stop term", qpAnalyzer, "term term");
+			
+			AssertQueryEquals("drop AND (stop) AND roll", qpAnalyzer, "+drop +roll");
+			AssertQueryEquals("term +(stop) term", qpAnalyzer, "term term");
+			AssertQueryEquals("term -(stop) term", qpAnalyzer, "term term");
+			
 			AssertQueryEquals("drop AND stop AND roll", qpAnalyzer, "+drop +roll");
 			AssertQueryEquals("term phrase term", qpAnalyzer, "term \"phrase1 phrase2\" term");
 			AssertQueryEquals("term AND NOT phrase term", qpAnalyzer, "+term -\"phrase1 phrase2\" term");
+			AssertQueryEquals("stop^3", qpAnalyzer, "");
 			AssertQueryEquals("stop", qpAnalyzer, "");
+			AssertQueryEquals("(stop)^3", qpAnalyzer, "");
+			AssertQueryEquals("((stop))^3", qpAnalyzer, "");
+			AssertQueryEquals("(stop^3)", qpAnalyzer, "");
+			AssertQueryEquals("((stop)^3)", qpAnalyzer, "");
+			AssertQueryEquals("(stop)", qpAnalyzer, "");
+			AssertQueryEquals("((stop))", qpAnalyzer, "");
 			Assert.IsTrue(GetQuery("term term term", qpAnalyzer) is BooleanQuery);
 			Assert.IsTrue(GetQuery("term +stop", qpAnalyzer) is TermQuery);
 		}
 		
 		[Test]
-        public virtual void  TestRange()
+		public virtual void  TestRange()
 		{
-            AssertQueryEquals("[ a TO z]", null, "[a TO z]");
-            Assert.IsTrue(GetQuery("[ a TO z]", null) is ConstantScoreRangeQuery);
+			AssertQueryEquals("[ a TO z]", null, "[a TO z]");
+			Assert.IsTrue(GetQuery("[ a TO z]", null) is ConstantScoreRangeQuery);
 			
-            Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
-            qp.SetUseOldRangeQuery(true);
-            Assert.IsTrue(qp.Parse("[ a TO z]") is RangeQuery);
-			
-            AssertQueryEquals("[ a TO z ]", null, "[a TO z]");
-            AssertQueryEquals("{ a TO z}", null, "{a TO z}");
-            AssertQueryEquals("{ a TO z }", null, "{a TO z}");
-            AssertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
-            AssertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
-            AssertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
-            AssertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
-            AssertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
-        }
-		
-        /// <summary>for testing legacy DateField support </summary>
-        private System.String GetLegacyDate(System.String s)
-        {
-            System.DateTime tempAux = System.DateTime.Parse(s, System.Globalization.CultureInfo.CurrentCulture);
-            return DateField.DateToString(tempAux);
-        }
-		
-        /// <summary>for testing DateTools support </summary>
-        private System.String GetDate(System.String s, DateTools.Resolution resolution)
-        {
-            System.DateTime tempAux = System.DateTime.Parse(s, System.Globalization.CultureInfo.CurrentCulture);
-            return GetDate(tempAux, resolution);
-        }
-		
-        /// <summary>for testing DateTools support </summary>
-        private System.String GetDate(System.DateTime d, DateTools.Resolution resolution)
-        {
-            if (resolution == null)
-            {
-                return DateField.DateToString(d);
-            }
-            else
-            {
-                return DateTools.DateToString(d, resolution);
-            }
-        }
+			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
+			qp.SetUseOldRangeQuery(true);
+			Assert.IsTrue(qp.Parse("[ a TO z]") is RangeQuery);
+			
+			AssertQueryEquals("[ a TO z ]", null, "[a TO z]");
+			AssertQueryEquals("{ a TO z}", null, "{a TO z}");
+			AssertQueryEquals("{ a TO z }", null, "{a TO z}");
+			AssertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
+			AssertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
+			AssertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
+			AssertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
+			AssertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
+		}
+		
+		/// <summary>for testing legacy DateField support </summary>
+		private System.String GetLegacyDate(System.String s)
+		{
+			System.DateTime tempAux = System.DateTime.Parse(s, System.Globalization.CultureInfo.CurrentCulture);
+			return DateField.DateToString(tempAux);
+		}
+		
+		/// <summary>for testing DateTools support </summary>
+		private System.String GetDate(System.String s, DateTools.Resolution resolution)
+		{
+			System.DateTime tempAux = System.DateTime.Parse(s, System.Globalization.CultureInfo.CurrentCulture);
+			return GetDate(tempAux, resolution);
+		}                                               
+		
+		/// <summary>for testing DateTools support </summary>
+		private System.String GetDate(System.DateTime d, DateTools.Resolution resolution)
+		{
+			if (resolution == null)
+			{
+				return DateField.DateToString(d);
+			}
+			else
+			{
+				return DateTools.DateToString(d, resolution);
+			}
+		}
 		
-        public virtual System.String GetDate(System.String s)
+		public virtual System.String GetDate(System.String s)
 		{
-            System.DateTime tempAux = System.DateTime.Parse(s);
-            return DateField.DateToString(tempAux);
+			System.DateTime tempAux = System.DateTime.Parse(s);
+			return DateField.DateToString(tempAux);
 		}
 		
 		private System.String GetLocalizedDate(int year, int month, int day, bool extendLastDate)
 		{
-            System.DateTime temp = new System.DateTime(year, month, day);
+			System.DateTime temp = new System.DateTime(year, month, day);
 			if (extendLastDate)
 			{
-                temp = temp.AddHours(23);
-                temp = temp.AddMinutes(59);
-                temp = temp.AddSeconds(59);
-                temp = temp.AddMilliseconds(999);
-			}
-            return temp.ToShortDateString();    // ToString("MM/d/yyy");
-		}
-		
-        /// <summary>for testing legacy DateField support </summary>
-        [Test]
-        public virtual void  TestLegacyDateRange()
-        {
-            System.String startDate = GetLocalizedDate(2002, 2, 1, false);
-            System.String endDate = GetLocalizedDate(2002, 2, 4, false);
-            System.Globalization.Calendar endDateExpected = new System.Globalization.GregorianCalendar();
-            // endDateExpected should be set to: "2002, 1, 4, 23, 59, 59, 999" otherwise what's the point of using GregorianCalendar()   // {{Aroush-2.1}}
-            System.DateTime tempAux = new System.DateTime(2002, 2, 4, 23, 59, 59, 999);
-            AssertQueryEquals("[ " + startDate + " TO " + endDate + "]", null, "[" + GetLegacyDate(startDate) + " TO " + DateField.DateToString(tempAux) + "]");
-            AssertQueryEquals("{  " + startDate + "    " + endDate + "   }", null, "{" + GetLegacyDate(startDate) + " TO " + GetLegacyDate(endDate) + "}");
-        }
-		
-        [Test]
-        public virtual void  TestDateRange()
-		{
-            System.String startDate = GetLocalizedDate(2002, 2, 1, false);
-            System.String endDate = GetLocalizedDate(2002, 2, 4, false);
-            System.DateTime endDateExpected = new System.DateTime(2002, 2, 4, 23, 59, 59, 999);
-            System.String defaultField = "default";
-            System.String monthField = "month";
-            System.String hourField = "hour";
-            Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
-			
-            // Don't set any date resolution and verify if DateField is used
-            System.DateTime tempAux = endDateExpected;
-            AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate, tempAux, null);
-			
-            // set a field specific date resolution
-            qp.SetDateResolution(monthField, DateTools.Resolution.MONTH);
-			
-            // DateField should still be used for defaultField
-            System.DateTime tempAux2 = endDateExpected;
-            AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate, tempAux2, null);
-			
-            // set default date resolution to MILLISECOND 
-            qp.SetDateResolution(DateTools.Resolution.MILLISECOND);
-			
-            // set second field specific date resolution    
-            qp.SetDateResolution(hourField, DateTools.Resolution.HOUR);
-			
-            // for this field no field specific date resolution has been set,
-            // so verify if the default resolution is used
-            System.DateTime tempAux3 = endDateExpected;
-            AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate, tempAux3, DateTools.Resolution.MILLISECOND);
-			
-            // verify if field specific date resolutions are used for these two fields
-            System.DateTime tempAux4 = endDateExpected;
-            AssertDateRangeQueryEquals(qp, monthField, startDate, endDate, tempAux4, DateTools.Resolution.MONTH);
-			
-            System.DateTime tempAux5 = endDateExpected;
-            AssertDateRangeQueryEquals(qp, hourField, startDate, endDate, tempAux5, DateTools.Resolution.HOUR);
-        }
-		
-        public virtual void  AssertDateRangeQueryEquals(Lucene.Net.QueryParsers.QueryParser qp, System.String field, System.String startDate, System.String endDate, System.DateTime endDateInclusive, DateTools.Resolution resolution)
-        {
-            AssertQueryEquals(qp, field, field + ":[" + startDate + " TO " + endDate + "]", "[" + GetDate(startDate, resolution) + " TO " + GetDate(endDateInclusive, resolution) + "]");
-            AssertQueryEquals(qp, field, field + ":{" + startDate + " TO " + endDate + "}", "{" + GetDate(startDate, resolution) + " TO " + GetDate(endDate, resolution) + "}");
-        }
+				temp = temp.AddHours(23);
+				temp = temp.AddMinutes(59);
+				temp = temp.AddSeconds(59);
+				temp = temp.AddMilliseconds(999);
+			}
+			return temp.ToShortDateString();    // ToString("MM/d/yyy");
+		}
 		
-        [Test]
-        public virtual void  TestEscaped()
+		/// <summary>for testing legacy DateField support </summary>
+		[Test]
+		public virtual void  TestLegacyDateRange()
+		{
+			System.String startDate = GetLocalizedDate(2002, 2, 1, false);
+			System.String endDate = GetLocalizedDate(2002, 2, 4, false);
+			System.Globalization.Calendar endDateExpected = new System.Globalization.GregorianCalendar();
+			// endDateExpected should be set to: "2002, 1, 4, 23, 59, 59, 999" otherwise what's the point of using GregorianCalendar()   // {{Aroush-2.1}}
+			System.DateTime tempAux = new System.DateTime(2002, 2, 4, 23, 59, 59, 999);
+			AssertQueryEquals("[ " + startDate + " TO " + endDate + "]", null, "[" + GetLegacyDate(startDate) + " TO " + DateField.DateToString(tempAux) + "]");
+			AssertQueryEquals("{  " + startDate + "    " + endDate + "   }", null, "{" + GetLegacyDate(startDate) + " TO " + GetLegacyDate(endDate) + "}");
+		}
+		
+		[Test]
+		public virtual void  TestDateRange()
+		{
+			System.String startDate = GetLocalizedDate(2002, 2, 1, false);
+			System.String endDate = GetLocalizedDate(2002, 2, 4, false);
+			System.DateTime endDateExpected = new System.DateTime(2002, 2, 4, 23, 59, 59, 999);
+			System.String defaultField = "default";
+			System.String monthField = "month";
+			System.String hourField = "hour";
+			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
+			
+			// Don't set any date resolution and verify if DateField is used
+			System.DateTime tempAux = endDateExpected;
+			AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate, tempAux, null);
+			
+			// set a field specific date resolution
+			qp.SetDateResolution(monthField, DateTools.Resolution.MONTH);
+			
+			// DateField should still be used for defaultField
+			System.DateTime tempAux2 = endDateExpected;
+			AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate, tempAux2, null);
+			
+			// set default date resolution to MILLISECOND 
+			qp.SetDateResolution(DateTools.Resolution.MILLISECOND);
+			
+			// set second field specific date resolution    
+			qp.SetDateResolution(hourField, DateTools.Resolution.HOUR);
+			
+			// for this field no field specific date resolution has been set,
+			// so verify if the default resolution is used
+			System.DateTime tempAux3 = endDateExpected;
+			AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate, tempAux3, DateTools.Resolution.MILLISECOND);
+			
+			// verify if field specific date resolutions are used for these two fields
+			System.DateTime tempAux4 = endDateExpected;
+			AssertDateRangeQueryEquals(qp, monthField, startDate, endDate, tempAux4, DateTools.Resolution.MONTH);
+			
+			System.DateTime tempAux5 = endDateExpected;
+			AssertDateRangeQueryEquals(qp, hourField, startDate, endDate, tempAux5, DateTools.Resolution.HOUR);
+		}
+		
+		public virtual void  AssertDateRangeQueryEquals(Lucene.Net.QueryParsers.QueryParser qp, System.String field, System.String startDate, System.String endDate, System.DateTime endDateInclusive, DateTools.Resolution resolution)
+		{
+			AssertQueryEquals(qp, field, field + ":[" + startDate + " TO " + endDate + "]", "[" + GetDate(startDate, resolution) + " TO " + GetDate(endDateInclusive, resolution) + "]");
+			AssertQueryEquals(qp, field, field + ":{" + startDate + " TO " + endDate + "}", "{" + GetDate(startDate, resolution) + " TO " + GetDate(endDate, resolution) + "}");
+		}
+		
+		[Test]
+		public virtual void  TestEscaped()
 		{
 			Analyzer a = new WhitespaceAnalyzer();
 			
@@ -596,6 +613,8 @@
 			//assertQueryEquals("foo \\|| bar", a, "foo \\|| bar");
 			//assertQueryEquals("foo \\AND bar", a, "foo \\AND bar");*/
 			
+			AssertQueryEquals("\\a", a, "a");
+			
 			AssertQueryEquals("a\\-b:c", a, "a-b:c");
 			AssertQueryEquals("a\\+b:c", a, "a+b:c");
 			AssertQueryEquals("a\\:b:c", a, "a:b:c");
@@ -627,90 +646,79 @@
 			AssertQueryEquals("[ a\\: TO a\\~ ]", null, "[a: TO a~]");
 			AssertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]");
 			
-            AssertQueryEquals("[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]", a, "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]");
+			AssertQueryEquals("[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]", a, "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]");
+			
+			AssertQueryEquals("a\\\\\\+b", a, "a\\+b");
+			
+			AssertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d");
+			AssertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\"");
+			AssertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\"");
+			
+			AssertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt");
+			
+			AssertParseException("XY\\"); // there must be a character after the escape char
 			
-            AssertQueryEquals("a\\\\\\+b", a, "a\\+b");
+			// test unicode escaping
+			AssertQueryEquals("a\\u0062c", a, "abc");
+			AssertQueryEquals("XY\\u005a", a, "XYZ");
+			AssertQueryEquals("XY\\u005A", a, "XYZ");
+			AssertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\"");
 			
-            AssertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d");
-            AssertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\"");
-            AssertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\"");
-			
-            AssertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt");
-			
-			
-            try
-            {
-                AssertQueryEquals("XY\\", a, "XYZ");
-                Assert.Fail("ParseException expected, not thrown");
-            }
-            catch (ParseException expected)
-            {
-            }
-			
-            // test unicode escaping
-            AssertQueryEquals("a\\u0062c", a, "abc");
-            AssertQueryEquals("XY\\u005a", a, "XYZ");
-            AssertQueryEquals("XY\\u005A", a, "XYZ");
-            AssertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\"");
-			
-            try
-            {
-                AssertQueryEquals("XY\\u005G", a, "XYZ");
-                Assert.Fail("ParseException expected, not thrown");
-            }
-            catch (ParseException expected)
-            {
-            }
-			
-            try
-            {
-                AssertQueryEquals("XY\\u005", a, "XYZ");
-                Assert.Fail("ParseException expected, not thrown");
-            }
-            catch (ParseException expected)
-            {
-            }
-        }
-		
-        [Test]
-        public virtual void  TestQueryStringEscaping()
-        {
-            Analyzer a = new WhitespaceAnalyzer();
-			
-            AssertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
-            AssertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
-            AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
-            AssertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c");
-			
-            AssertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c");
-            AssertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c");
-            AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
-            AssertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c");
-			
-            AssertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*");
-            AssertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*");
-            AssertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*");
-			
-            AssertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*");
-			
-            AssertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c");
-            AssertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c");
-            AssertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c");
-			
-            AssertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c");
-			
-            AssertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~");
-            AssertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~");
-            AssertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~");
-            AssertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~");
-			
-            AssertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]");
-            AssertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]");
-            AssertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]");
-        }
+			AssertParseException("XY\\u005G"); // test non-hex character in escaped unicode sequence
+			AssertParseException("XY\\u005"); // test incomplete escaped unicode sequence
+			
+			// Tests bug LUCENE-800
+			AssertQueryEquals("(item:\\\\ item:ABCD\\\\)", a, "item:\\ item:ABCD\\");
+			AssertParseException("(item:\\\\ item:ABCD\\\\))"); // unmatched closing parenthesis 
+			AssertQueryEquals("\\*", a, "*");
+			AssertQueryEquals("\\\\", a, "\\"); // escaped backslash
+			
+			AssertParseException("\\"); // a backslash must always be escaped
+		}
+		
+		[Test]
+		public virtual void  TestQueryStringEscaping()
+		{
+			Analyzer a = new WhitespaceAnalyzer();
+			
+			AssertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
+			AssertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
+			AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+			AssertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c");
+			
+			AssertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c");
+			AssertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c");
+			AssertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
+			AssertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c");
+			
+			AssertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*");
+			AssertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*");
+			AssertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*");
+			
+			AssertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*");
+			
+			AssertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c");
+			AssertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c");
+			AssertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c");
+			
+			AssertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c");
+			
+			AssertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~");
+			AssertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~");
+			AssertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~");
+			AssertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~");
+			
+			AssertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]");
+			AssertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]");
+			AssertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]");
+			
+			// LUCENE-881
+			AssertEscapedQueryEquals("|| abc ||", a, "\\|\\| abc \\|\\|");
+			AssertEscapedQueryEquals("&& abc &&", a, "\\&\\& abc \\&\\&");
+		}
 		
-        [Test]
-        public virtual void  TestTabNewlineCarriageReturn()
+		[Test]
+		public virtual void  TestTabNewlineCarriageReturn()
 		{
 			AssertQueryEqualsDOA("+weltbank +worlbank", null, "+weltbank +worlbank");
 			
@@ -733,7 +741,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestSimpleDAO()
+		public virtual void  TestSimpleDAO()
 		{
 			AssertQueryEqualsDOA("term term term", null, "+term +term +term");
 			AssertQueryEqualsDOA("term +term term", null, "+term +term +term");
@@ -743,7 +751,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestBoost()
+		public virtual void  TestBoost()
 		{
 			StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(new System.String[]{"on"});
 			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", oneStopAnalyzer);
@@ -766,49 +774,60 @@
 			Assert.AreEqual(1.0f, q.GetBoost(), 0.01f);
 		}
 		
-		[Test]
-        public virtual void  TestException()
+		public virtual void  AssertParseException(System.String queryString)
 		{
 			try
 			{
-				AssertQueryEquals("\"some phrase", null, "abc");
-				Assert.Fail("ParseException expected, not thrown");
+				Query q = GetQuery(queryString, null);
 			}
-			catch (ParseException expected)
+			catch (ParseException)
 			{
+				return ;
 			}
+			Assert.Fail("ParseException expected, not thrown");
 		}
 		
 		[Test]
-        public virtual void  TestCustomQueryParserWildcard()
+		public virtual void  TestException()
+		{
+			AssertParseException("\"some phrase");
+			AssertParseException("(foo bar");
+			AssertParseException("foo bar))");
+			AssertParseException("field:term:with:colon some more terms");
+			AssertParseException("(sub query)^5.0^2.0 plus more");
+			AssertParseException("secret AND illegal) AND access:confidential");
+		}
+		
+		[Test]
+		public virtual void  TestCustomQueryParserWildcard()
 		{
 			try
 			{
 				new QPTestParser("contents", new WhitespaceAnalyzer()).Parse("a?t");
 				Assert.Fail("Wildcard queries should not be allowed");
 			}
-			catch (ParseException expected)
+			catch (ParseException)
 			{
 				// expected exception
 			}
 		}
 		
 		[Test]
-        public virtual void  TestCustomQueryParserFuzzy()
+		public virtual void  TestCustomQueryParserFuzzy()
 		{
 			try
 			{
 				new QPTestParser("contents", new WhitespaceAnalyzer()).Parse("xunit~");
 				Assert.Fail("Fuzzy queries should not be allowed");
 			}
-			catch (ParseException expected)
+			catch (ParseException)
 			{
 				// expected exception
 			}
 		}
 		
 		[Test]
-        public virtual void  TestBooleanQuery()
+		public virtual void  TestBooleanQuery()
 		{
 			BooleanQuery.SetMaxClauseCount(2);
 			try
@@ -817,7 +836,7 @@
 				qp.Parse("one two three");
 				Assert.Fail("ParseException expected due to too many boolean clauses");
 			}
-			catch (ParseException expected)
+			catch (ParseException)
 			{
 				// too many boolean clauses, so ParseException is expected
 			}
@@ -825,7 +844,7 @@
 		
 		/// <summary> This test differs from TestPrecedenceQueryParser</summary>
 		[Test]
-        public virtual void  TestPrecedence()
+		public virtual void  TestPrecedence()
 		{
 			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
 			Query query1 = qp.Parse("A AND B OR C AND D");
@@ -833,98 +852,146 @@
 			Assert.AreEqual(query1, query2);
 		}
 		
-        [Test]
-        public virtual void  TestLocalDateFormat()
-        {
-            Lucene.Net.Store.RAMDirectory ramDir = new Lucene.Net.Store.RAMDirectory();
-            Lucene.Net.Index.IndexWriter iw = new Lucene.Net.Index.IndexWriter(ramDir, new WhitespaceAnalyzer(), true);
-            AddDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
-            AddDateDoc("b", 2005, 12, 4, 22, 15, 0, iw);
-            iw.Close();
-            Lucene.Net.Search.IndexSearcher is_Renamed = new Lucene.Net.Search.IndexSearcher(ramDir);
-            AssertHits(1, "[12/1/2005 TO 12/3/2005]", is_Renamed);
-            AssertHits(2, "[12/1/2005 TO 12/4/2005]", is_Renamed);
-            AssertHits(1, "[12/3/2005 TO 12/4/2005]", is_Renamed);
-            AssertHits(1, "{12/1/2005 TO 12/3/2005}", is_Renamed);
-            AssertHits(1, "{12/1/2005 TO 12/4/2005}", is_Renamed);
-            AssertHits(0, "{12/3/2005 TO 12/4/2005}", is_Renamed);
-            is_Renamed.Close();
-        }
-		
-        [Test]
-        public virtual void  TestStarParsing()
-        {
-            int[] type = new int[1];
-            Lucene.Net.QueryParsers.QueryParser qp = new AnonymousClassQueryParser(type, this, "field", new WhitespaceAnalyzer());
-			
-            TermQuery tq;
-			
-            tq = (TermQuery) qp.Parse("foo:zoo*");
-            Assert.AreEqual("zoo", tq.GetTerm().Text());
-            Assert.AreEqual(2, type[0]);
-			
-            tq = (TermQuery) qp.Parse("foo:zoo*^2");
-            Assert.AreEqual("zoo", tq.GetTerm().Text());
-            Assert.AreEqual(2, type[0]);
-            Assert.AreEqual(tq.GetBoost(), 2, 0);
-			
-            tq = (TermQuery) qp.Parse("foo:*");
-            Assert.AreEqual("*", tq.GetTerm().Text());
-            Assert.AreEqual(1, type[0]); // could be a valid prefix query in the future too
-			
-            tq = (TermQuery) qp.Parse("foo:*^2");
-            Assert.AreEqual("*", tq.GetTerm().Text());
-            Assert.AreEqual(1, type[0]);
-            Assert.AreEqual(tq.GetBoost(), 2, 0);
-			
-            tq = (TermQuery) qp.Parse("*:foo");
-            Assert.AreEqual("*", tq.GetTerm().Field());
-            Assert.AreEqual("foo", tq.GetTerm().Text());
-            Assert.AreEqual(3, type[0]);
-			
-            tq = (TermQuery) qp.Parse("*:*");
-            Assert.AreEqual("*", tq.GetTerm().Field());
-            Assert.AreEqual("*", tq.GetTerm().Text());
-            Assert.AreEqual(1, type[0]); // could be handled as a prefix query in the future
-			
-            tq = (TermQuery) qp.Parse("(*:*)");
-            Assert.AreEqual("*", tq.GetTerm().Field());
-            Assert.AreEqual("*", tq.GetTerm().Text());
-            Assert.AreEqual(1, type[0]);
-        }
-		
-        [Test]
-        public virtual void  TestMatchAllDocs()
-        {
-            Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
-            Assert.AreEqual(new MatchAllDocsQuery(), qp.Parse("*:*"));
-            Assert.AreEqual(new MatchAllDocsQuery(), qp.Parse("(*:*)"));
-            BooleanQuery bq = (BooleanQuery) qp.Parse("+*:* -*:*");
-            Assert.IsTrue(bq.GetClauses()[0].GetQuery() is MatchAllDocsQuery);
-            Assert.IsTrue(bq.GetClauses()[1].GetQuery() is MatchAllDocsQuery);
-        }
-		
-        private void  AssertHits(int expected, System.String query, Lucene.Net.Search.IndexSearcher is_Renamed)
-        {
-            Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("date", new WhitespaceAnalyzer());
-            qp.SetLocale(new System.Globalization.CultureInfo("en-US"));
-            Query q = qp.Parse(query);
-            Lucene.Net.Search.Hits hits = is_Renamed.Search(q);
-            Assert.AreEqual(expected, hits.Length());
-        }
-		
-        private static void  AddDateDoc(System.String content, int year, int month, int day, int hour, int minute, int second, Lucene.Net.Index.IndexWriter iw)
-        {
-            Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
-            d.Add(new Lucene.Net.Documents.Field("f", content, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.TOKENIZED));
-            System.DateTime tempAux = new System.DateTime(year, month, day, hour, minute, second);
-            d.Add(new Lucene.Net.Documents.Field("date", DateField.DateToString(tempAux), Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.UN_TOKENIZED));
-            iw.AddDocument(d);
-        }
+		[Test]
+		public virtual void  TestLocalDateFormat()
+		{
+			Lucene.Net.Store.RAMDirectory ramDir = new Lucene.Net.Store.RAMDirectory();
+			Lucene.Net.Index.IndexWriter iw = new Lucene.Net.Index.IndexWriter(ramDir, new WhitespaceAnalyzer(), true);
+			AddDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
+			AddDateDoc("b", 2005, 12, 4, 22, 15, 0, iw);
+			iw.Close();
+			Lucene.Net.Search.IndexSearcher is_Renamed = new Lucene.Net.Search.IndexSearcher(ramDir);
+			AssertHits(1, "[12/1/2005 TO 12/3/2005]", is_Renamed);
+			AssertHits(2, "[12/1/2005 TO 12/4/2005]", is_Renamed);
+			AssertHits(1, "[12/3/2005 TO 12/4/2005]", is_Renamed);
+			AssertHits(1, "{12/1/2005 TO 12/3/2005}", is_Renamed);
+			AssertHits(1, "{12/1/2005 TO 12/4/2005}", is_Renamed);
+			AssertHits(0, "{12/3/2005 TO 12/4/2005}", is_Renamed);
+			is_Renamed.Close();
+		}
+		
+		[Test]
+		public virtual void  TestStarParsing()
+		{
+			int[] type = new int[1];
+			Lucene.Net.QueryParsers.QueryParser qp = new AnonymousClassQueryParser(type, this, "field", new WhitespaceAnalyzer());
+			
+			TermQuery tq;
+			
+			tq = (TermQuery) qp.Parse("foo:zoo*");
+			Assert.AreEqual("zoo", tq.GetTerm().Text());
+			Assert.AreEqual(2, type[0]);
+			
+			tq = (TermQuery) qp.Parse("foo:zoo*^2");
+			Assert.AreEqual("zoo", tq.GetTerm().Text());
+			Assert.AreEqual(2, type[0]);
+			Assert.AreEqual(tq.GetBoost(), 2, 0);
+			
+			tq = (TermQuery) qp.Parse("foo:*");
+			Assert.AreEqual("*", tq.GetTerm().Text());
+			Assert.AreEqual(1, type[0]); // could be a valid prefix query in the future too
+			
+			tq = (TermQuery) qp.Parse("foo:*^2");
+			Assert.AreEqual("*", tq.GetTerm().Text());
+			Assert.AreEqual(1, type[0]);
+			Assert.AreEqual(tq.GetBoost(), 2, 0);
+			
+			tq = (TermQuery) qp.Parse("*:foo");
+			Assert.AreEqual("*", tq.GetTerm().Field());
+			Assert.AreEqual("foo", tq.GetTerm().Text());
+			Assert.AreEqual(3, type[0]);
+			
+			tq = (TermQuery) qp.Parse("*:*");
+			Assert.AreEqual("*", tq.GetTerm().Field());
+			Assert.AreEqual("*", tq.GetTerm().Text());
+			Assert.AreEqual(1, type[0]); // could be handled as a prefix query in the future
+			
+			tq = (TermQuery) qp.Parse("(*:*)");
+			Assert.AreEqual("*", tq.GetTerm().Field());
+			Assert.AreEqual("*", tq.GetTerm().Text());
+			Assert.AreEqual(1, type[0]);
+		}
+		
+		[Test]
+		public virtual void  TestStopwords()
+		{
+			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("a", new StopAnalyzer(new System.String[] { "the", "foo" }));
+			Query result = qp.Parse("a:the OR a:foo");
+			Assert.IsNotNull(result, "result is null and it shouldn't be");
+			Assert.IsTrue(result is BooleanQuery, "result is not a BooleanQuery");
+			Assert.IsTrue(((BooleanQuery) result).Clauses().Count == 0, ((BooleanQuery) result).Clauses().Count + " does not equal: " + 0);
+			result = qp.Parse("a:woo OR a:the");
+			Assert.IsNotNull(result, "result is null and it shouldn't be");
+			Assert.IsTrue(result is TermQuery, "result is not a TermQuery");
+			result = qp.Parse("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)");
+			Assert.IsNotNull(result, "result is null and it shouldn't be");
+			Assert.IsTrue(result is BooleanQuery, "result is not a BooleanQuery");
+			System.Console.Out.WriteLine("Result: " + result);
+			Assert.IsTrue(((BooleanQuery) result).Clauses().Count == 2, ((BooleanQuery) result).Clauses().Count + " does not equal: " + 2);
+		}
+		
+		[Test]
+		public virtual void  TestPositionIncrement()
+		{
+			bool dflt = StopFilter.GetEnablePositionIncrementsDefault();
+			StopFilter.SetEnablePositionIncrementsDefault(true);
+			try
+			{
+				Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("a", new StopAnalyzer(new System.String[] { "the", "in", "are", "this" }));
+				qp.SetEnablePositionIncrements(true);
+				System.String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
+				//               0         2                      5           7  8
+				int[] expectedPositions = new int[]{1, 3, 4, 6, 9};
+				PhraseQuery pq = (PhraseQuery) qp.Parse(qtxt);
+				//System.out.println("Query text: "+qtxt);
+				//System.out.println("Result: "+pq);
+				Term[] t = pq.GetTerms();
+				int[] pos = pq.GetPositions();
+				for (int i = 0; i < t.Length; i++)
+				{
+					//System.out.println(i+". "+t[i]+"  pos: "+pos[i]);
+					Assert.AreEqual(expectedPositions[i], pos[i], "term " + i + " = " + t[i] + " has wrong term-position!");
+				}
+			}
+			finally
+			{
+				StopFilter.SetEnablePositionIncrementsDefault(dflt);
+			}
+		}
+		
+		[Test]
+		public virtual void  TestMatchAllDocs()
+		{
+			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
+			Assert.AreEqual(new MatchAllDocsQuery(), qp.Parse("*:*"));
+			Assert.AreEqual(new MatchAllDocsQuery(), qp.Parse("(*:*)"));
+			BooleanQuery bq = (BooleanQuery) qp.Parse("+*:* -*:*");
+			Assert.IsTrue(bq.GetClauses()[0].GetQuery() is MatchAllDocsQuery);
+			Assert.IsTrue(bq.GetClauses()[1].GetQuery() is MatchAllDocsQuery);
+		}
+		
+		private void  AssertHits(int expected, System.String query, Lucene.Net.Search.IndexSearcher is_Renamed)
+		{
+			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("date", new WhitespaceAnalyzer());
+			qp.SetLocale(new System.Globalization.CultureInfo("en-US"));
+			Query q = qp.Parse(query);
+			Lucene.Net.Search.Hits hits = is_Renamed.Search(q);
+			Assert.AreEqual(expected, hits.Length());
+		}
+		
+		private static void  AddDateDoc(System.String content, int year, int month, int day, int hour, int minute, int second, Lucene.Net.Index.IndexWriter iw)
+		{
+			Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
+			d.Add(new Lucene.Net.Documents.Field("f", content, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.TOKENIZED));
+			System.DateTime tempAux = new System.DateTime(year, month, day, hour, minute, second);
+			d.Add(new Lucene.Net.Documents.Field("date", DateField.DateToString(tempAux), Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.UN_TOKENIZED));
+			iw.AddDocument(d);
+		}
 		
-        [TearDown]
-		public virtual void  TearDown()
+		[TearDown]
+		public override void TearDown()
 		{
+			base.TearDown();
 			BooleanQuery.SetMaxClauseCount(originalMaxClauses);
 		}
 	}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/BaseTestRangeFilter.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs Tue Jul 15 14:44:04 2008
@@ -19,16 +19,17 @@
 
 using NUnit.Framework;
 
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 
 namespace Lucene.Net.Search
 {
 	[TestFixture]
-	public class BaseTestRangeFilter
+	public class BaseTestRangeFilter : LuceneTestCase
 	{
 		
 		public const bool F = false;

Added: incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/CachingWrapperFilterHelper.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using IndexReader = Lucene.Net.Index.IndexReader;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> A unit test helper class to test when the filter is getting cached and when it is not.</summary>
+	[Serializable]
+	public class CachingWrapperFilterHelper : CachingWrapperFilter
+	{
+		
+		private bool shouldHaveCache = false;
+		
+		public CachingWrapperFilterHelper(Filter filter) : base(filter)
+		{
+		}
+		
+		public virtual void  SetShouldHaveCache(bool shouldHaveCache)
+		{
+			this.shouldHaveCache = shouldHaveCache;
+		}
+		
+		public override System.Collections.BitArray Bits(IndexReader reader)
+		{
+			if (cache == null)
+			{
+				cache = new System.Collections.Hashtable();
+			}
+			
+			lock (cache.SyncRoot)
+			{
+				// check cache
+				System.Collections.BitArray cached = (System.Collections.BitArray) cache[reader];
+				if (shouldHaveCache)
+				{
+					Assert.IsNotNull(cached, "Cache should have data ");
+				}
+				else
+				{
+					Assert.IsNull(cached, cached == null ? "Cache should be null " : "Cache should be null " + cached.ToString());
+					// argument evaluated prior to method call ->//Assert.IsNull(cached, "Cache should be null " + cached.ToString());
+				}
+				if (cached != null)
+				{
+					return cached;
+				}
+			}
+			
+			System.Collections.BitArray bits = filter.Bits(reader);
+			
+			lock (cache.SyncRoot)
+			{
+				// update cache
+				cache[reader] = bits;
+			}
+			
+			return bits;
+		}
+		
+		public override System.String ToString()
+		{
+			return "CachingWrapperFilterHelper(" + filter + ")";
+		}
+		
+		public  override bool Equals(System.Object o)
+		{
+			if (!(o is CachingWrapperFilterHelper))
+				return false;
+			return this.filter.Equals((CachingWrapperFilterHelper) o);
+		}
+		public override int GetHashCode()
+		{
+			return base.GetHashCode();
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/CheckHits.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs Tue Jul 15 14:44:04 2008
@@ -19,141 +19,152 @@
 
 using NUnit.Framework;
 
-using Directory = Lucene.Net.Store.Directory;
 using IndexReader = Lucene.Net.Index.IndexReader;
+using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Search
 {
 	
 	public class CheckHits
 	{
-        private class AnonymousClassHitCollector : HitCollector
-        {
-            public AnonymousClassHitCollector(System.Collections.Hashtable actual)
-            {
-                InitBlock(actual);
-            }
-            private void  InitBlock(System.Collections.Hashtable actual)
-            {
-                this.actual = actual;
-            }
-            private System.Collections.Hashtable actual;
-            public override void  Collect(int doc, float score)
-            {
-                actual.Add((System.Int32) doc, (System.Int32) doc);
-            }
-        }
-		
-        /// <summary> Tests that all documents up to maxDoc which are *not* in the
-        /// expected result set, have an explanation which indicates no match
-        /// (ie: Explanation value of 0.0f)
-        /// </summary>
-        public static void  CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
-        {
-			
-            System.String d = q.ToString(defaultFieldName);
-            System.Collections.Hashtable ignore = new System.Collections.Hashtable();
-            for (int i = 0; i < results.Length; i++)
-            {
-                ignore.Add((System.Int32) results[i], (System.Int32) results[i]);
-            }
-			
-            int maxDoc = searcher.MaxDoc();
-            for (int doc = 0; doc < maxDoc; doc++)
-            {
-                if (ignore.Contains((System.Int32) doc))
-                    continue;
+		private class AnonymousClassHitCollector : HitCollector
+		{
+			public AnonymousClassHitCollector(System.Collections.Hashtable actual)
+			{
+				InitBlock(actual);
+			}
+			private void  InitBlock(System.Collections.Hashtable actual)
+			{
+				this.actual = actual;
+			}
+			private System.Collections.Hashtable actual;
+			public override void  Collect(int doc, float score)
+			{
+				actual.Add((System.Int32) doc, (System.Int32) doc);
+			}
+		}
+		
+		/// <summary> Some explain methods calculate their values through a slightly
+		/// different order of operations from the actual scoring method ...
+		/// this allows for a small amount of variation
+		/// </summary>
+		public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00005f;
+		
+		/// <summary> Tests that all documents up to maxDoc which are *not* in the
+		/// expected result set, have an explanation which indicates no match
+		/// (ie: Explanation value of 0.0f)
+		/// </summary>
+		public static void  CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
+		{
+			
+			System.String d = q.ToString(defaultFieldName);
+			System.Collections.Hashtable ignore = new System.Collections.Hashtable();
+			for (int i = 0; i < results.Length; i++)
+			{
+				ignore.Add((System.Int32) results[i], (System.Int32) results[i]);
+			}
+			
+			int maxDoc = searcher.MaxDoc();
+			for (int doc = 0; doc < maxDoc; doc++)
+			{
+				if (ignore.Contains((System.Int32) doc))
+					continue;
 				
-                Explanation exp = searcher.Explain(q, doc);
-                Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
-                Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
-            }
-        }
-		
-        /// <summary> Tests that a query matches the an expected set of documents using a
-        /// HitCollector.
-        /// 
-        /// <p>
-        /// Note that when using the HitCollector API, documents will be collected
-        /// if they "match" regardless of what their score is.
-        /// </p>
-        /// </summary>
-        /// <param name="query">the query to test
-        /// </param>
-        /// <param name="searcher">the searcher to test the query against
-        /// </param>
-        /// <param name="defaultFieldName">used for displaing the query in assertion messages
-        /// </param>
-        /// <param name="results">a list of documentIds that must match the query
-        /// </param>
-        /// <seealso cref="Searcher.Search(Query,HitCollector)">
-        /// </seealso>
-        /// <seealso cref="checkHits">
-        /// </seealso>
-        public static void  CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
-        {
-			
-            System.Collections.ArrayList correct = new System.Collections.ArrayList(results.Length);
-            for (int i = 0; i < results.Length; i++)
-            {
-                correct.Add(results[i]);
-            }
+				Explanation exp = searcher.Explain(q, doc);
+				Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
+				//Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
+				// also check for NaN
+				Assert.AreEqual(0.0f, float.IsNaN(exp.GetValue()) ? 0.0f : exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
+			}
+		}
+		
+		/// <summary> Tests that a query matches an expected set of documents using a
+		/// HitCollector.
+		/// 
+		/// <p>
+		/// Note that when using the HitCollector API, documents will be collected
+		/// if they "match" regardless of what their score is.
+		/// </p>
+		/// </summary>
+		/// <param name="query">the query to test
+		/// </param>
+		/// <param name="searcher">the searcher to test the query against
+		/// </param>
+		/// <param name="defaultFieldName">used for displaying the query in assertion messages
+		/// </param>
+		/// <param name="results">a list of documentIds that must match the query
+		/// </param>
+		/// <seealso cref="Searcher.Search(Query,HitCollector)">
+		/// </seealso>
+		/// <seealso cref="checkHits">
+		/// </seealso>
+		public static void  CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
+		{
+			
+			System.Collections.ArrayList correct = new System.Collections.ArrayList(results.Length);
+			for (int i = 0; i < results.Length; i++)
+			{
+				correct.Add(results[i]);
+			}
+			
+			System.Collections.Hashtable actual = new System.Collections.Hashtable();
+			searcher.Search(query, new AnonymousClassHitCollector(actual));
+
+			System.Collections.IDictionaryEnumerator e = actual.GetEnumerator();
+			while (e.MoveNext())
+			{
+				Assert.Contains(e.Key, correct, query.ToString(defaultFieldName));
+			}
+			
+			QueryUtils.Check(query, searcher);
+		}
+		
+		/// <summary> Tests that a query matches an expected set of documents using Hits.
+		/// 
+		/// <p>
+		/// Note that when using the Hits API, documents will only be returned
+		/// if they have a positive normalized score.
+		/// </p>
+		/// </summary>
+		/// <param name="query">the query to test
+		/// </param>
+		/// <param name="searcher">the searcher to test the query against
+		/// </param>
+		/// <param name="defaultFieldName">used for displaying the query in assertion messages
+		/// </param>
+		/// <param name="results">a list of documentIds that must match the query
+		/// </param>
+		/// <seealso cref="Searcher.Search(Query)">
+		/// </seealso>
+		/// <seealso cref="CheckHitCollector">
+		/// </seealso>
+		public static void  CheckHits_Renamed(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
+		{
+			if (searcher is IndexSearcher)
+			{
+				QueryUtils.Check(query, (IndexSearcher) searcher);
+			}
+			
+			Hits hits = searcher.Search(query);
 			
-            System.Collections.Hashtable actual = new System.Collections.Hashtable();
-            searcher.Search(query, new AnonymousClassHitCollector(actual));
+			System.Collections.ArrayList correct = new System.Collections.ArrayList(results.Length);
+			for (int i = 0; i < results.Length; i++)
+			{
+				correct.Add(results[i]);
+			}
 
-            System.Collections.IDictionaryEnumerator e = actual.GetEnumerator();
-            while (e.MoveNext())
-            {
-                Assert.Contains(e.Key, correct, query.ToString(defaultFieldName));
-            }
-			
-            QueryUtils.Check(query, searcher);
-        }
-		
-        /// <summary> Tests that a query matches the an expected set of documents using Hits.
-        /// 
-        /// <p>
-        /// Note that when using the Hits API, documents will only be returned
-        /// if they have a positive normalized score.
-        /// </p>
-        /// </summary>
-        /// <param name="query">the query to test
-        /// </param>
-        /// <param name="searcher">the searcher to test the query against
-        /// </param>
-        /// <param name="defaultFieldName">used for displaing the query in assertion messages
-        /// </param>
-        /// <param name="results">a list of documentIds that must match the query
-        /// </param>
-        /// <seealso cref="Searcher.Search(Query)">
-        /// </seealso>
-        /// <seealso cref="CheckHitCollector">
-        /// </seealso>
-        public static void  CheckHits_Renamed(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
-        {
-            if (searcher is IndexSearcher)
-            {
-                QueryUtils.Check(query, (IndexSearcher) searcher);
-            }
-			
-            Hits hits = searcher.Search(query);
-			
-            System.Collections.ArrayList correct = new System.Collections.ArrayList(results.Length);
-            for (int i = 0; i < results.Length; i++)
-            {
-                correct.Add(results[i]);
-            }
-			
-            for (int i = 0; i < hits.Length(); i++)
-            {
-                Assert.Contains(hits.Id(i), correct, query.ToString(defaultFieldName));
-            }
+			System.Collections.ArrayList actual = new System.Collections.ArrayList(hits.Length());
+			for (int i = 0; i < hits.Length(); i++)
+			{
+				actual.Add(hits.Id(i));
+			}
+			
+			Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));
 			
-            QueryUtils.Check(query, searcher);
-        }
+			QueryUtils.Check(query, searcher);
+		}
 		
-        /// <summary>Tests that a Hits has an expected order of documents </summary>
+		/// <summary>Tests that a Hits has an expected order of documents </summary>
 		public static void  CheckDocIds(System.String mes, int[] results, Hits hits)
 		{
 			Assert.AreEqual(results.Length, hits.Length(), mes + " nr of hits");
@@ -205,15 +216,15 @@
 				end = System.Math.Max(len1, len2);
 			}
 			
-			sb.Append("Hits length1=" + len1 + "\tlength2=" + len2);
+			sb.Append("Hits length1=").Append(len1).Append("\tlength2=").Append(len2);
 			
-			sb.Append("\n");
+			sb.Append('\n');
 			for (int i = start; i < end; i++)
 			{
-				sb.Append("hit=" + i + ":");
+				sb.Append("hit=").Append(i).Append(':');
 				if (i < len1)
 				{
-					sb.Append(" doc" + hits1.Id(i) + "=" + hits1.Score(i));
+					sb.Append(" doc").Append(hits1.Id(i)).Append('=').Append(hits1.Score(i));
 				}
 				else
 				{
@@ -222,9 +233,9 @@
 				sb.Append(",\t");
 				if (i < len2)
 				{
-					sb.Append(" doc" + hits2.Id(i) + "=" + hits2.Score(i));
+					sb.Append(" doc").Append(hits2.Id(i)).Append('=').Append(hits2.Score(i));
 				}
-				sb.Append("\n");
+				sb.Append('\n');
 			}
 			return sb.ToString();
 		}
@@ -233,136 +244,275 @@
 		public static System.String TopdocsString(TopDocs docs, int start, int end)
 		{
 			System.Text.StringBuilder sb = new System.Text.StringBuilder();
-			sb.Append("TopDocs totalHits=" + docs.totalHits + " top=" + docs.scoreDocs.Length + "\n");
+			sb.Append("TopDocs totalHits=").Append(docs.totalHits).Append(" top=").Append(docs.scoreDocs.Length).Append('\n');
 			if (end <= 0)
 				end = docs.scoreDocs.Length;
 			else
 				end = System.Math.Min(end, docs.scoreDocs.Length);
 			for (int i = start; i < end; i++)
 			{
-				sb.Append("\t");
+				sb.Append('\t');
 				sb.Append(i);
 				sb.Append(") doc=");
 				sb.Append(docs.scoreDocs[i].doc);
 				sb.Append("\tscore=");
 				sb.Append(docs.scoreDocs[i].score);
-				sb.Append("\n");
+				sb.Append('\n');
 			}
 			return sb.ToString();
 		}
 		
-        /// <summary> Asserts that the score explanation for every document matching a
-        /// query corrisponds with the true score.
-        /// 
-        /// </summary>
-        /// <seealso cref="ExplanationAsserter">
-        /// </seealso>
-        /// <param name="query">the query to test
-        /// </param>
-        /// <param name="searcher">the searcher to test the query against
-        /// </param>
-        /// <param name="defaultFieldName">used for displaing the query in assertion messages
-        /// </param>
-        public static void  CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher)
-        {
-			
-            searcher.Search(query, new ExplanationAsserter(query, defaultFieldName, searcher));
-        }
-		
-        /// <summary> an IndexSearcher that implicitly checks hte explanation of every match
-        /// whenever it executes a search
-        /// </summary>
-        public class ExplanationAssertingSearcher : IndexSearcher
-        {
-            public ExplanationAssertingSearcher(Directory d) : base(d)
-            {
-            }
-            public ExplanationAssertingSearcher(IndexReader r) : base(r)
-            {
-            }
-            protected internal virtual void  CheckExplanations(Query q)
-            {
-                base.Search(q, null, new ExplanationAsserter(q, null, this));
-            }
-            public virtual Hits search(Query query, Filter filter)
-            {
-                CheckExplanations(query);
-                return base.Search(query, filter);
-            }
-            public override Hits Search(Query query, Sort sort)
-            {
-                CheckExplanations(query);
-                return base.Search(query, sort);
-            }
-            public override Hits Search(Query query, Filter filter, Sort sort)
-            {
-                CheckExplanations(query);
-                return base.Search(query, filter, sort);
-            }
-            public override TopFieldDocs Search(Query query, Filter filter, int n, Sort sort)
-            {
+		/// <summary> Asserts that the explanation value for every document matching a
+		/// query corresponds with the true score. 
+		/// 
+		/// </summary>
+		/// <seealso cref="ExplanationAsserter">
+		/// </seealso>
+		/// <seealso cref="CheckExplanations(Query, String, Searcher, bool)">
+		/// for a "deep" testing of the explanation details.
+		/// 
+		/// </seealso>
+		/// <param name="query">the query to test
+		/// </param>
+		/// <param name="searcher">the searcher to test the query against
+		/// </param>
+		/// <param name="defaultFieldName">used for displaying the query in assertion messages
+		/// </param>
+		public static void  CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher)
+		{
+			CheckExplanations(query, defaultFieldName, searcher, false);
+		}
+		
+		/// <summary> Asserts that the explanation value for every document matching a
+		/// query corresponds with the true score.  Optionally does "deep" 
+		/// testing of the explanation details.
+		/// 
+		/// </summary>
+		/// <seealso cref="ExplanationAsserter">
+		/// </seealso>
+		/// <param name="query">the query to test
+		/// </param>
+		/// <param name="searcher">the searcher to test the query against
+		/// </param>
+		/// <param name="defaultFieldName">used for displaying the query in assertion messages
+		/// </param>
+		/// <param name="deep">indicates whether a deep comparison of sub-Explanation details should be executed
+		/// </param>
+		public static void  CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher, bool deep)
+		{
+			
+			searcher.Search(query, new ExplanationAsserter(query, defaultFieldName, searcher, deep));
+		}
+		
+		/// <summary> Assert that an explanation has the expected score, and optionally that its
+		/// sub-details max/sum/factor match to that score.
+		/// 
+		/// </summary>
+		/// <param name="q">String representation of the query for assertion messages
+		/// </param>
+		/// <param name="doc">Document ID for assertion messages
+		/// </param>
+		/// <param name="score">Real score value of doc with query q
+		/// </param>
+		/// <param name="deep">indicates whether a deep comparison of sub-Explanation details should be executed
+		/// </param>
+		/// <param name="expl">The Explanation to match against score
+		/// </param>
+		public static void  VerifyExplanation(System.String q, int doc, float score, bool deep, Explanation expl)
+		{
+			float value_Renamed = expl.GetValue();
+			Assert.AreEqual(score, value_Renamed, EXPLAIN_SCORE_TOLERANCE_DELTA, q + ": score(doc=" + doc + ")=" + score + " != explanationScore=" + value_Renamed + " Explanation: " + expl);
+			
+			if (!deep)
+				return ;
+			
+			Explanation[] detail = expl.GetDetails();
+			if (detail != null)
+			{
+				if (detail.Length == 1)
+				{
+					// simple containment, no matter what the description says, 
+					// just verify contained expl has same score
+					VerifyExplanation(q, doc, score, deep, detail[0]);
+				}
+				else
+				{
+					// explanation must either:
+					// - end with one of: "product of:", "sum of:", "max of:", or
+					// - have "max plus <x> times others" (where <x> is float).
+					float x = 0;
+					System.String descr = expl.GetDescription().ToLower();
+					bool productOf = descr.EndsWith("product of:");
+					bool sumOf = descr.EndsWith("sum of:");
+					bool maxOf = descr.EndsWith("max of:");
+					bool maxTimesOthers = false;
+					if (!(productOf || sumOf || maxOf))
+					{
+						// maybe 'max plus x times others'
+						int k1 = descr.IndexOf("max plus ");
+						if (k1 >= 0)
+						{
+							k1 += "max plus ".Length;
+							int k2 = descr.IndexOf(" ", k1);
+							try
+							{
+								x = System.Single.Parse(descr.Substring(k1, (k2) - (k1)).Trim());
+								if (descr.Substring(k2).Trim().Equals("times others of:"))
+								{
+									maxTimesOthers = true;
+								}
+							}
+							catch (System.FormatException)
+							{
+							}
+						}
+					}
+					Assert.IsTrue(productOf || sumOf || maxOf || maxTimesOthers, q + ": multi valued explanation description=\"" + descr + "\" must be 'max of plus x times others' or end with 'product of'" + " or 'sum of:' or 'max of:' - " + expl);
+					float sum = 0;
+					float product = 1;
+					float max = 0;
+					for (int i = 0; i < detail.Length; i++)
+					{
+						float dval = detail[i].GetValue();
+						VerifyExplanation(q, doc, dval, deep, detail[i]);
+						product *= dval;
+						sum += dval;
+						max = System.Math.Max(max, dval);
+					}
+					float combined = 0;
+					if (productOf)
+					{
+						combined = product;
+					}
+					else if (sumOf)
+					{
+						combined = sum;
+					}
+					else if (maxOf)
+					{
+						combined = max;
+					}
+					else if (maxTimesOthers)
+					{
+						combined = max + x * (sum - max);
+					}
+					else
+					{
+						Assert.IsTrue(false, "should never get here!");
+					}
+					Assert.AreEqual(combined, value_Renamed, EXPLAIN_SCORE_TOLERANCE_DELTA, q + ": actual subDetails combined==" + combined + " != value=" + value_Renamed + " Explanation: " + expl);
+				}
+			}
+		}
+		
+		/// <summary> an IndexSearcher that implicitly checks the explanation of every match
+		/// whenever it executes a search.
+		/// 
+		/// </summary>
+		/// <seealso cref="ExplanationAsserter">
+		/// </seealso>
+		public class ExplanationAssertingSearcher : IndexSearcher
+		{
+			public ExplanationAssertingSearcher(Directory d) : base(d)
+			{
+			}
+			public ExplanationAssertingSearcher(IndexReader r) : base(r)
+			{
+			}
+			protected internal virtual void  CheckExplanations(Query q)
+			{
+				base.Search(q, null, new ExplanationAsserter(q, null, this));
+			}
+			public override Hits Search(Query query, Filter filter)
+			{
+				CheckExplanations(query);
+				return base.Search(query, filter);
+			}
+			public override Hits Search(Query query, Sort sort)
+			{
+				CheckExplanations(query);
+				return base.Search(query, sort);
+			}
+			public override Hits Search(Query query, Filter filter, Sort sort)
+			{
+				CheckExplanations(query);
+				return base.Search(query, filter, sort);
+			}
+			public override TopFieldDocs Search(Query query, Filter filter, int n, Sort sort)
+			{
 				
-                CheckExplanations(query);
-                return base.Search(query, filter, n, sort);
-            }
-            public override void  Search(Query query, HitCollector results)
-            {
-                CheckExplanations(query);
-                base.Search(query, results);
-            }
-            public override void  Search(Query query, Filter filter, HitCollector results)
-            {
-                CheckExplanations(query);
-                base.Search(query, filter, results);
-            }
-            public override TopDocs Search(Query query, Filter filter, int n)
-            {
+				CheckExplanations(query);
+				return base.Search(query, filter, n, sort);
+			}
+			public override void  Search(Query query, HitCollector results)
+			{
+				CheckExplanations(query);
+				base.Search(query, results);
+			}
+			public override void  Search(Query query, Filter filter, HitCollector results)
+			{
+				CheckExplanations(query);
+				base.Search(query, filter, results);
+			}
+			public override TopDocs Search(Query query, Filter filter, int n)
+			{
 				
-                CheckExplanations(query);
-                return base.Search(query, filter, n);
-            }
-        }
-		
-        /// <summary> Asserts that the score explanation for every document matching a
-        /// query corrisponds with the true score.
-        /// 
-        /// NOTE: this HitCollector should only be used with the Query and Searcher
-        /// specified at when it is constructed.
-        /// </summary>
-        public class ExplanationAsserter : HitCollector
-        {
-			
-            /// <summary> Some explains methods calculate their vlaues though a slightly
-            /// differnet  order of operations from the acctaul scoring method ...
-            /// this allows for a small amount of variation
-            /// </summary>
-            public static float SCORE_TOLERANCE_DELTA = 0.00005f;
-			
-            internal Query q;
-            internal Searcher s;
-            internal System.String d;
-            public ExplanationAsserter(Query q, System.String defaultFieldName, Searcher s)
-            {
-                this.q = q;
-                this.s = s;
-                this.d = q.ToString(defaultFieldName);
-            }
-            public override void  Collect(int doc, float score)
-            {
-                Explanation exp = null;
+				CheckExplanations(query);
+				return base.Search(query, filter, n);
+			}
+		}
+		
+		/// <summary> Asserts that the score explanation for every document matching a
+		/// query corresponds with the true score.
+		/// 
+		/// NOTE: this HitCollector should only be used with the Query and Searcher
+		/// specified at when it is constructed.
+		/// 
+		/// </summary>
+		/// <seealso cref="CheckHits.VerifyExplanation">
+		/// </seealso>
+		public class ExplanationAsserter : HitCollector
+		{
+			
+			/// <deprecated>
+			/// </deprecated>
+			/// <seealso cref="CheckHits.EXPLAIN_SCORE_TOLERANCE_DELTA">
+			/// </seealso>
+			public static float SCORE_TOLERANCE_DELTA = 0.00005f;
+			
+			internal Query q;
+			internal Searcher s;
+			internal System.String d;
+			internal bool deep;
+			
+			/// <summary>Constructs an instance which does shallow tests on the Explanation </summary>
+			public ExplanationAsserter(Query q, System.String defaultFieldName, Searcher s):this(q, defaultFieldName, s, false)
+			{
+			}
+			public ExplanationAsserter(Query q, System.String defaultFieldName, Searcher s, bool deep)
+			{
+				this.q = q;
+				this.s = s;
+				this.d = q.ToString(defaultFieldName);
+				this.deep = deep;
+			}
+
+			public override void  Collect(int doc, float score)
+			{
+				Explanation exp = null;
 				
-                try
-                {
-                    exp = s.Explain(q, doc);
-                }
-                catch (System.IO.IOException e)
-                {
-                    throw new System.SystemException("exception in hitcollector of [[" + d + "]] for #" + doc, e);
-                }
+				try
+				{
+					exp = s.Explain(q, doc);
+				}
+				catch (System.IO.IOException e)
+				{
+					throw new System.SystemException("exception in hitcollector of [[" + d + "]] for #" + doc, e);
+				}
 				
-                Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
-                Assert.AreEqual(score, exp.GetValue(), SCORE_TOLERANCE_DELTA, "Score of [[" + d + "]] for #" + doc + " does not match explanation: " + exp.ToString());
-            }
-        }
-    }
+				Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
+				Assert.AreEqual(score, exp.GetValue(), SCORE_TOLERANCE_DELTA, "Score of [[" + d + "]] for #" + doc + " does not match explanation: " + exp.ToString());
+			}
+		}
+	}
 }
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Function/FunctionTestSetup.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Fieldable = Lucene.Net.Documents.Fieldable;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+
+namespace Lucene.Net.Search.Function
+{
+	
+	/// <summary> Setup for function tests</summary>
+	[TestFixture]
+	public abstract class FunctionTestSetup : LuceneTestCase
+	{
+		
+		/// <summary> Actual score computation order is slightly different from the assumptions;
+		/// this allows for a small amount of variation
+		/// </summary>
+		public static float TEST_SCORE_TOLERANCE_DELTA = 0.001f;
+		
+		protected internal const bool DBG = false; // change to true for logging to print
+		
+		protected internal const int N_DOCS = 17; // select a prime number > 2
+		
+		protected internal const System.String ID_FIELD = "id";
+		protected internal const System.String TEXT_FIELD = "text";
+		protected internal const System.String INT_FIELD = "iii";
+		protected internal const System.String FLOAT_FIELD = "fff";
+		
+		private static readonly System.String[] DOC_TEXT_LINES = new System.String[]{"Well, this is just some plain text we use for creating the ", "test documents. It used to be a text from an online collection ", "devoted to first aid, but if there was there an (online) lawyers ", "first aid collection with legal advices, \"it\" might have quite ", "probably advised one not to include \"it\"'s text or the text of ", "any other online collection in one's code, unless one has money ", "that one don't need and one is happy to donate for lawyers ", "charity. Anyhow at some point, rechecking the usage of this text, ", "it became uncertain that this text is free to use, because ", "the web site in the disclaimer of he eBook containing that text ", "was not responding anymore, and at the same time, in projGut, ", "searching for first aid no longer found that eBook as well. ", "So here we are, with a perhaps much less interesting ", "text for the test, but oh much much safer. "};
+		
+		protected internal Directory dir;
+		protected internal Analyzer anlzr;
+		
+		/* @override constructor */
+		//public FunctionTestSetup(System.String name):base(name)
+		//{
+		//}
+		
+		/* @override */
+		[TearDown]
+		public override void  TearDown()
+		{
+			base.TearDown();
+			base.TearDown();
+			dir = null;
+			anlzr = null;
+		}
+		
+		/* @override */
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			// prepare a small index with just a few documents.  
+			dir = new RAMDirectory();
+			anlzr = new StandardAnalyzer();
+			IndexWriter iw = new IndexWriter(dir, anlzr);
+			// add docs not exactly in natural ID order, to verify we do check the order of docs by scores
+			int remaining = N_DOCS;
+			bool[] done = new bool[N_DOCS];
+			int i = 0;
+			while (remaining > 0)
+			{
+				if (done[i])
+				{
+					throw new System.Exception("to set this test correctly N_DOCS=" + N_DOCS + " must be primary and greater than 2!");
+				}
+				AddDoc(iw, i);
+				done[i] = true;
+				i = (i + 4) % N_DOCS;
+				remaining--;
+			}
+			iw.Close();
+		}
+		
+		private void  AddDoc(IndexWriter iw, int i)
+		{
+			Document d = new Document();
+			Fieldable f;
+			int scoreAndID = i + 1;
+			
+			f = new Field(ID_FIELD, Id2String(scoreAndID), Field.Store.YES, Field.Index.UN_TOKENIZED); // for debug purposes
+			f.SetOmitNorms(true);
+			d.Add(f);
+			
+			f = new Field(TEXT_FIELD, "text of doc" + scoreAndID + TextLine(i), Field.Store.NO, Field.Index.TOKENIZED); // for regular search
+			f.SetOmitNorms(true);
+			d.Add(f);
+			
+			f = new Field(INT_FIELD, "" + scoreAndID, Field.Store.NO, Field.Index.UN_TOKENIZED); // for function scoring
+			f.SetOmitNorms(true);
+			d.Add(f);
+			
+			f = new Field(FLOAT_FIELD, scoreAndID + ".000", Field.Store.NO, Field.Index.UN_TOKENIZED); // for function scoring
+			f.SetOmitNorms(true);
+			d.Add(f);
+			
+			iw.AddDocument(d);
+			Log("added: " + d);
+		}
+		
+		// 17 --> ID00017
+		protected internal virtual System.String Id2String(int scoreAndID)
+		{
+			System.String s = "000000000" + scoreAndID;
+			int n = ("" + N_DOCS).Length + 3;
+			int k = s.Length - n;
+			return "ID" + s.Substring(k);
+		}
+		
+		// some text line for regular search
+		private System.String TextLine(int docNum)
+		{
+			return DOC_TEXT_LINES[docNum % DOC_TEXT_LINES.Length];
+		}
+		
+		// extract expected doc score from its ID Field: "ID7" --> 7.0
+		protected internal virtual float ExpectedFieldScore(System.String docIDFieldVal)
+		{
+			return System.Single.Parse(docIDFieldVal.Substring(2));
+		}
+		
+		// debug messages (change DBG to true for anything to print) 
+		protected internal virtual void  Log(System.Object o)
+		{
+			if (DBG)
+			{
+				System.Console.Out.WriteLine(o.ToString());
+			}
+		}
+	}
+}
\ No newline at end of file