Posted to commits@lucenenet.apache.org by ar...@apache.org on 2007/08/11 18:56:44 UTC

svn commit: r564939 [6/8] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/Standard/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Q...

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDateFilter.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs Sat Aug 11 09:56:37 2007
@@ -16,14 +16,16 @@
  */
 
 using System;
+
+using NUnit.Framework;
+
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using DateField = Lucene.Net.Documents.DateField;
+using DateTools = Lucene.Net.Documents.DateTools;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {
@@ -33,8 +35,8 @@
 	/// </summary>
 	/// <author>  Otis Gospodnetic
 	/// </author>
-	/// <version>  $Revision: 150487 $
-	/// </version>
+    /// <version>  $Revision: 472959 $
+    /// </version>
 	[TestFixture]
     public class TestDateFilter
 	{
@@ -47,7 +49,7 @@
 			RAMDirectory indexStore = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
 			
-			long now = System.DateTime.Now.Ticks;;
+			long now = System.DateTime.Now.Millisecond;
 			
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			// add time that is in the past
@@ -104,7 +106,7 @@
 			RAMDirectory indexStore = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
 			
-			long now = System.DateTime.Now.Ticks;
+			long now = System.DateTime.Now.Millisecond;
 			
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			// add time that is in the future

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDisjunctionMaxQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,6 +16,9 @@
  */
 
 using System;
+
+using NUnit.Framework;
+
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
@@ -24,7 +27,6 @@
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {
@@ -148,6 +150,7 @@
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
 			q.Add(Tq("hed", "albino"));
 			q.Add(Tq("hed", "elephant"));
+            QueryUtils.Check(q, s);
 			
 			Hits h = s.Search(q);
 			
@@ -175,6 +178,8 @@
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
 			q.Add(Tq("dek", "albino"));
 			q.Add(Tq("dek", "elephant"));
+            QueryUtils.Check(q, s);
+
 			
 			Hits h = s.Search(q);
 			
@@ -203,6 +208,8 @@
 			q.Add(Tq("hed", "elephant"));
 			q.Add(Tq("dek", "albino"));
 			q.Add(Tq("dek", "elephant"));
+            QueryUtils.Check(q, s);
+
 			
 			Hits h = s.Search(q);
 			
@@ -229,6 +236,8 @@
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.01f);
 			q.Add(Tq("dek", "albino"));
 			q.Add(Tq("dek", "elephant"));
+            QueryUtils.Check(q, s);
+			
 			
 			Hits h = s.Search(q);
 			
@@ -259,14 +268,17 @@
 				q1.Add(Tq("hed", "albino"));
 				q1.Add(Tq("dek", "albino"));
                 q.Add(q1, BooleanClause.Occur.MUST); //false,false);
+                QueryUtils.Check(q1, s);
             }
 			{
 				DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.0f);
 				q2.Add(Tq("hed", "elephant"));
 				q2.Add(Tq("dek", "elephant"));
                 q.Add(q2, BooleanClause.Occur.MUST); //false,false);
+                QueryUtils.Check(q2, s);
             }
 			
+            QueryUtils.Check(q, s);
 			
 			Hits h = s.Search(q);
 			
@@ -303,6 +315,7 @@
 				q2.Add(Tq("dek", "elephant"));
                 q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
             }
+            QueryUtils.Check(q, s);
 			
 			
 			Hits h = s.Search(q);
@@ -344,6 +357,7 @@
 				q2.Add(Tq("dek", "elephant"));
                 q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
             }
+            QueryUtils.Check(q, s);
 			
 			
 			Hits h = s.Search(q);
@@ -396,6 +410,7 @@
 				q2.Add(Tq("dek", "elephant"));
                 q.Add(q2, BooleanClause.Occur.SHOULD); //false,false);
             }
+            QueryUtils.Check(q, s);
 			
 			
 			Hits h = s.Search(q);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDocBoost.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs Sat Aug 11 09:56:37 2007
@@ -16,10 +16,11 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using Lucene.Net.Documents;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
@@ -70,8 +71,8 @@
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
 			
-			Field f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
-			Field f2 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
+			Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
+			Fieldable f2 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
 			f2.SetBoost(2.0f);
 			
 			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestExplanations.cs?view=auto&rev=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs Sat Aug 11 09:56:37 2007
@@ -0,0 +1,258 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Lucene.Net.Search.Spans;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using Term = Lucene.Net.Index.Term;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using ParseException = Lucene.Net.QueryParsers.ParseException;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> Tests primitive queries (i.e. those that rewrite to themselves) to
+	/// ensure they match the expected set of docs, and that the score of each
+	/// match is equal to the value of the score's explanation.
+	/// 
+	/// <p>
+	/// The assumption is that if all of the "primitive" queries work well,
+	/// then anything that rewrites to a primitive will work well also.
+	/// </p>
+	/// 
+	/// </summary>
+	/// <seealso cref=""Subclasses for actual tests"">
+	/// </seealso>
+	[TestFixture]
+    public class TestExplanations
+	{
+		protected internal IndexSearcher searcher;
+		
+		public const System.String FIELD = "field";
+		public static readonly Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser(FIELD, new WhitespaceAnalyzer());
+		
+        [TearDown]
+		public virtual void  TearDown()
+		{
+			searcher.Close();
+		}
+		
+        [SetUp]
+		public virtual void  SetUp()
+		{
+			RAMDirectory directory = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			for (int i = 0; i < docFields.Length; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field(FIELD, docFields[i], Field.Store.NO, Field.Index.TOKENIZED));
+				writer.AddDocument(doc);
+			}
+			writer.Close();
+			searcher = new IndexSearcher(directory);
+		}
+		
+		protected internal System.String[] docFields = new System.String[]{"w1 w2 w3 w4 w5", "w1 w3 w2 w3 zz", "w1 xx w2 yy w3", "w1 w3 xx w2 yy w3 zz"};
+		
+		public virtual Query MakeQuery(System.String queryText)
+		{
+			return qp.Parse(queryText);
+		}
+		
+		public virtual void  Qtest(System.String queryText, int[] expDocNrs)
+		{
+			Qtest(MakeQuery(queryText), expDocNrs);
+		}
+		public virtual void  Qtest(Query q, int[] expDocNrs)
+		{
+			// check the expDocNrs first, then check the explanations
+			CheckHits.CheckHitCollector(q, FIELD, searcher, expDocNrs);
+			CheckHits.CheckExplanations(q, FIELD, searcher);
+		}
+		
+		/// <summary> Tests a query using qtest after wrapping it with both optB and reqB</summary>
+		/// <seealso cref="Qtest">
+		/// </seealso>
+		/// <seealso cref="ReqB">
+		/// </seealso>
+		/// <seealso cref="OptB">
+		/// </seealso>
+		public virtual void  Bqtest(Query q, int[] expDocNrs)
+		{
+			Qtest(ReqB(q), expDocNrs);
+			Qtest(OptB(q), expDocNrs);
+		}
+		/// <summary> Tests a query using qtest after wrapping it with both optB and reqB</summary>
+		/// <seealso cref="Qtest">
+		/// </seealso>
+		/// <seealso cref="ReqB">
+		/// </seealso>
+		/// <seealso cref="OptB">
+		/// </seealso>
+		public virtual void  Bqtest(System.String queryText, int[] expDocNrs)
+		{
+			Bqtest(MakeQuery(queryText), expDocNrs);
+		}
+		
+		/// <summary>A filter that only lets the specified document numbers pass </summary>
+		[Serializable]
+		public class ItemizedFilter : Filter
+		{
+			internal int[] docs;
+			public ItemizedFilter(int[] docs)
+			{
+				this.docs = docs;
+			}
+			public override System.Collections.BitArray Bits(IndexReader r)
+			{
+				System.Collections.BitArray b = new System.Collections.BitArray((r.MaxDoc() % 64 == 0?r.MaxDoc() / 64:r.MaxDoc() / 64 + 1) * 64);
+				for (int i = 0; i < docs.Length; i++)
+				{
+					b.Set(docs[i], true);
+				}
+				return b;
+			}
+		}
+		
+		/// <summary>helper for generating MultiPhraseQueries </summary>
+		public static Term[] Ta(System.String[] s)
+		{
+			Term[] t = new Term[s.Length];
+			for (int i = 0; i < s.Length; i++)
+			{
+				t[i] = new Term(FIELD, s[i]);
+			}
+			return t;
+		}
+		
+		/// <summary>MACRO for SpanTermQuery </summary>
+		public virtual SpanTermQuery St(System.String s)
+		{
+			return new SpanTermQuery(new Term(FIELD, s));
+		}
+		
+		/// <summary>MACRO for SpanNotQuery </summary>
+		public virtual SpanNotQuery Snot(SpanQuery i, SpanQuery e)
+		{
+			return new SpanNotQuery(i, e);
+		}
+		
+		/// <summary>MACRO for SpanOrQuery containing two SpanTerm queries </summary>
+		public virtual SpanOrQuery Sor(System.String s, System.String e)
+		{
+			return Sor(St(s), St(e));
+		}
+		/// <summary>MACRO for SpanOrQuery containing two SpanQueries </summary>
+		public virtual SpanOrQuery Sor(SpanQuery s, SpanQuery e)
+		{
+			return new SpanOrQuery(new SpanQuery[]{s, e});
+		}
+		
+		/// <summary>MACRO for SpanOrQuery containing three SpanTerm queries </summary>
+		public virtual SpanOrQuery Sor(System.String s, System.String m, System.String e)
+		{
+			return Sor(St(s), St(m), St(e));
+		}
+		/// <summary>MACRO for SpanOrQuery containing two SpanQueries </summary>
+		public virtual SpanOrQuery Sor(SpanQuery s, SpanQuery m, SpanQuery e)
+		{
+			return new SpanOrQuery(new SpanQuery[]{s, m, e});
+		}
+		
+		/// <summary>MACRO for SpanNearQuery containing two SpanTerm queries </summary>
+		public virtual SpanNearQuery Snear(System.String s, System.String e, int slop, bool inOrder)
+		{
+			return Snear(St(s), St(e), slop, inOrder);
+		}
+		/// <summary>MACRO for SpanNearQuery containing two SpanQueries </summary>
+		public virtual SpanNearQuery Snear(SpanQuery s, SpanQuery e, int slop, bool inOrder)
+		{
+			return new SpanNearQuery(new SpanQuery[]{s, e}, slop, inOrder);
+		}
+		
+		
+		/// <summary>MACRO for SpanNearQuery containing three SpanTerm queries </summary>
+		public virtual SpanNearQuery Snear(System.String s, System.String m, System.String e, int slop, bool inOrder)
+		{
+			return Snear(St(s), St(m), St(e), slop, inOrder);
+		}
+		/// <summary>MACRO for SpanNearQuery containing three SpanQueries </summary>
+		public virtual SpanNearQuery Snear(SpanQuery s, SpanQuery m, SpanQuery e, int slop, bool inOrder)
+		{
+			return new SpanNearQuery(new SpanQuery[]{s, m, e}, slop, inOrder);
+		}
+		
+		/// <summary>MACRO for SpanFirst(SpanTermQuery) </summary>
+		public virtual SpanFirstQuery Sf(System.String s, int b)
+		{
+			return new SpanFirstQuery(St(s), b);
+		}
+		
+		/// <summary> MACRO: Wraps a Query in a BooleanQuery so that it is optional, along
+		/// with a second prohibited clause which will never match anything
+		/// </summary>
+		public virtual Query OptB(System.String q)
+		{
+			return OptB(MakeQuery(q));
+		}
+		/// <summary> MACRO: Wraps a Query in a BooleanQuery so that it is optional, along
+		/// with a second prohibited clause which will never match anything
+		/// </summary>
+		public virtual Query OptB(Query q)
+		{
+			BooleanQuery bq = new BooleanQuery(true);
+			bq.Add(q, BooleanClause.Occur.SHOULD);
+			bq.Add(new TermQuery(new Term("NEVER", "MATCH")), BooleanClause.Occur.MUST_NOT);
+			return bq;
+		}
+		
+		/// <summary> MACRO: Wraps a Query in a BooleanQuery so that it is required, along
+		/// with a second optional clause which will match everything
+		/// </summary>
+		public virtual Query ReqB(System.String q)
+		{
+			return ReqB(MakeQuery(q));
+		}
+		/// <summary> MACRO: Wraps a Query in a BooleanQuery so that it is required, along
+		/// with a second optional clause which will match everything
+		/// </summary>
+		public virtual Query ReqB(Query q)
+		{
+			BooleanQuery bq = new BooleanQuery(true);
+			bq.Add(q, BooleanClause.Occur.MUST);
+			bq.Add(new TermQuery(new Term(FIELD, "w1")), BooleanClause.Occur.SHOULD);
+			return bq;
+		}
+		
+		/// <summary> Placeholder: JUnit freaks if you don't have one test ... making
+		/// class abstract doesn't help
+		/// </summary>
+        [Test]
+        public virtual void  TestNoop()
+		{
+			/* NOOP */
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFilteredQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,12 +16,14 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
+using IndexReader = Lucene.Net.Index.IndexReader;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
@@ -35,14 +37,13 @@
 	/// </summary>
 	/// <author>   Tim Jones
 	/// </author>
-	/// <version>  $Id: TestFilteredQuery.java 150585 2004-10-10 15:44:45Z dnaber $
-	/// </version>
+    /// <version>  $Id: TestFilteredQuery.java 472959 2006-11-09 16:21:50Z yonik $
+    /// </version>
 	/// <since>   1.4
 	/// </since>
 	[TestFixture]
     public class TestFilteredQuery
 	{
-		//UPGRADE_NOTE: Field 'EnclosingInstance' was added to class 'AnonymousClassFilter' to access its enclosing instance. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1019'"
 		[Serializable]
 		private class AnonymousClassFilter : Filter
 		{
@@ -125,6 +126,7 @@
 			Hits hits = searcher.Search(filteredquery);
 			Assert.AreEqual(1, hits.Length());
 			Assert.AreEqual(1, hits.Id(0));
+            QueryUtils.Check(filteredquery, searcher);
 			
 			hits = searcher.Search(filteredquery, new Sort("sorter"));
 			Assert.AreEqual(1, hits.Length());
@@ -133,16 +135,19 @@
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "one")), filter);
 			hits = searcher.Search(filteredquery);
 			Assert.AreEqual(2, hits.Length());
+            QueryUtils.Check(filteredquery, searcher);
 			
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "x")), filter);
 			hits = searcher.Search(filteredquery);
 			Assert.AreEqual(1, hits.Length());
 			Assert.AreEqual(3, hits.Id(0));
+            QueryUtils.Check(filteredquery, searcher);
 			
 			filteredquery = new FilteredQuery(new TermQuery(new Term("field", "y")), filter);
 			hits = searcher.Search(filteredquery);
 			Assert.AreEqual(0, hits.Length());
-		}
+            QueryUtils.Check(filteredquery, searcher);
+        }
 		
 		/// <summary> This tests FilteredQuery's rewrite correctness</summary>
 		[Test]
@@ -153,6 +158,20 @@
 			Query filteredquery = new FilteredQuery(rq, filter);
 			Hits hits = searcher.Search(filteredquery);
 			Assert.AreEqual(2, hits.Length());
-		}
-	}
+            QueryUtils.Check(filteredquery, searcher);
+        }
+
+        [Test]		
+        public virtual void  TestBoolean()
+        {
+            BooleanQuery bq = new BooleanQuery();
+            Query query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.search.SingleDocTestFilter(0));
+            bq.Add(query, BooleanClause.Occur.MUST);
+            query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.search.SingleDocTestFilter(1));
+            bq.Add(query, BooleanClause.Occur.MUST);
+            Hits hits = searcher.Search(bq);
+            Assert.AreEqual(0, hits.Length());
+            QueryUtils.Check(query, searcher);
+        }
+    }
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFuzzyQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,7 +16,9 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMatchAllDocsQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,13 +16,15 @@
  */
 
 using System;
+
+using NUnit.Framework;
+
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiPhraseQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,16 +16,18 @@
  */
 
 using System;
+
+using NUnit.Framework;
+
+using Term = Lucene.Net.Index.Term;
+using TermEnum = Lucene.Net.Index.TermEnum;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using IndexReader = Lucene.Net.Index.IndexReader;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Term = Lucene.Net.Index.Term;
-using TermEnum = Lucene.Net.Index.TermEnum;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiSearcher.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcher.cs Sat Aug 11 09:56:37 2007
@@ -16,8 +16,11 @@
  */
 
 using System;
-using KeywordAnalyzer = Lucene.Net.Analysis.KeywordAnalyzer;
+
+using NUnit.Framework;
+
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using KeywordAnalyzer = Lucene.Net.Analysis.KeywordAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -26,7 +29,6 @@
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {
@@ -132,7 +134,7 @@
 			searchers2[0] = new IndexSearcher(indexStoreB);
 			searchers2[1] = new IndexSearcher(indexStoreA);
 			// creating the mulitSearcher
-			Searcher mSearcher2 = GetMultiSearcherInstance(searchers2);
+			MultiSearcher mSearcher2 = GetMultiSearcherInstance(searchers2);
 			// performing the same search
 			Hits hits2 = mSearcher2.Search(query);
 			
@@ -146,7 +148,18 @@
 			}
 			mSearcher2.Close();
 			
-			//--------------------------------------------------------------------
+            // test the subSearcher() method:
+            Query subSearcherQuery = parser.Parse("id:doc1");
+            hits2 = mSearcher2.Search(subSearcherQuery);
+            Assert.AreEqual(2, hits2.Length());
+            Assert.AreEqual(0, mSearcher2.SubSearcher(hits2.Id(0))); // hit from searchers2[0]
+            Assert.AreEqual(1, mSearcher2.SubSearcher(hits2.Id(1))); // hit from searchers2[1]
+            subSearcherQuery = parser.Parse("id:doc2");
+            hits2 = mSearcher2.Search(subSearcherQuery);
+            Assert.AreEqual(1, hits2.Length());
+            Assert.AreEqual(1, mSearcher2.SubSearcher(hits2.Id(0))); // hit from searchers2[1]
+			
+            //--------------------------------------------------------------------
 			// scenario 3
 			//--------------------------------------------------------------------
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiSearcherRanking.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiSearcherRanking.cs Sat Aug 11 09:56:37 2007
@@ -16,7 +16,9 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiThreadTermVectors.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiThreadTermVectors.cs Sat Aug 11 09:56:37 2007
@@ -16,10 +16,11 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using Lucene.Net.Documents;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using TermFreqVector = Lucene.Net.Index.TermFreqVector;
@@ -50,7 +51,7 @@
 			for (int i = 0; i < numDocs; i++)
 			{
 				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-				Field fld = new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.YES);
+				Fieldable fld = new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.UN_TOKENIZED, Field.TermVector.YES);
 				doc.Add(fld);
 				writer.AddDocument(doc);
 			}
@@ -184,16 +185,16 @@
 			long start = 0L;
 			for (int docId = 0; docId < numDocs; docId++)
 			{
-				start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+				start = System.DateTime.Now.Millisecond;
 				TermFreqVector[] vectors = reader.GetTermFreqVectors(docId);
-				timeElapsed += (System.DateTime.Now.Ticks - 621355968000000000) / 10000 - start;
+				timeElapsed += System.DateTime.Now.Millisecond - start;
 				
 				// verify vectors result
 				VerifyVectors(vectors, docId);
 				
-				start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+				start = System.DateTime.Now.Millisecond;
 				TermFreqVector vector = reader.GetTermFreqVector(docId, "field");
-				timeElapsed += (System.DateTime.Now.Ticks - 621355968000000000) / 10000 - start;
+				timeElapsed += System.DateTime.Now.Millisecond - start;
 				
 				vectors = new TermFreqVector[1];
 				vectors[0] = vector;

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestNot.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestNot.cs Sat Aug 11 09:56:37 2007
@@ -16,13 +16,15 @@
  */
 
 using System;
+
 using NUnit.Framework;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 
 namespace Lucene.Net.Search
 {

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestParallelMultiSearcher.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs Sat Aug 11 09:56:37 2007
@@ -21,7 +21,7 @@
 {
 	
 	/// <summary> Unit tests for the ParallelMultiSearcher </summary>
-	public class TestParallelMultiSearcher:TestMultiSearcher
+	public class TestParallelMultiSearcher : TestMultiSearcher
 	{
 		
 		protected internal override MultiSearcher GetMultiSearcherInstance(Searcher[] searchers)

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPhrasePrefixQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,6 +16,9 @@
  */
 
 using System;
+
+using NUnit.Framework;
+
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
@@ -24,7 +27,6 @@
 using Term = Lucene.Net.Index.Term;
 using TermEnum = Lucene.Net.Index.TermEnum;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPhraseQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,14 +16,11 @@
  */
 
 using System;
+
 using NUnit.Framework;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using StopAnalyzer = Lucene.Net.Analysis.StopAnalyzer;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using WhitespaceTokenizer = Lucene.Net.Analysis.WhitespaceTokenizer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+
+using Lucene.Net.Analysis;
+using Lucene.Net.Documents;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
@@ -87,7 +84,7 @@
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.TOKENIZED));
 			doc.Add(new Field("repeated", "this is a repeated field - first part", Field.Store.YES, Field.Index.TOKENIZED));
-			Field repeatedField = new Field("repeated", "second part of a repeated field", Field.Store.YES, Field.Index.TOKENIZED);
+			Fieldable repeatedField = new Field("repeated", "second part of a repeated field", Field.Store.YES, Field.Index.TOKENIZED);
 			doc.Add(repeatedField);
 			writer.AddDocument(doc);
 			
@@ -113,7 +110,8 @@
 			query.Add(new Term("field", "five"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length());
-		}
+            QueryUtils.Check(query, searcher);
+        }
 		
 		[Test]
         public virtual void  TestBarelyCloseEnough()
@@ -123,7 +121,8 @@
 			query.Add(new Term("field", "five"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length());
-		}
+            QueryUtils.Check(query, searcher);
+        }
 		
 		/// <summary> Ensures slop of 0 works for exact matches, but not reversed</summary>
 		[Test]
@@ -134,13 +133,16 @@
 			query.Add(new Term("field", "five"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "exact match");
+            QueryUtils.Check(query, searcher);
+
 			
 			query = new PhraseQuery();
 			query.Add(new Term("field", "two"));
 			query.Add(new Term("field", "one"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length(), "reverse not exact");
-		}
+            QueryUtils.Check(query, searcher);
+        }
 		
 		[Test]
         public virtual void  TestSlop1()
@@ -151,6 +153,8 @@
 			query.Add(new Term("field", "two"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "in order");
+            QueryUtils.Check(query, searcher);
+
 			
 			// Ensures slop of 1 does not work for phrases out of order;
 			// must be at least 2.
@@ -160,7 +164,8 @@
 			query.Add(new Term("field", "one"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length(), "reversed, slop not 2 or more");
-		}
+            QueryUtils.Check(query, searcher);
+        }
 		
 		/// <summary> As long as slop is at least 2, terms can be reversed</summary>
 		[Test]
@@ -171,6 +176,8 @@
 			query.Add(new Term("field", "one"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "just sloppy enough");
+            QueryUtils.Check(query, searcher);
+
 			
 			query = new PhraseQuery();
 			query.SetSlop(2);
@@ -178,7 +185,8 @@
 			query.Add(new Term("field", "one"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length(), "not sloppy enough");
-		}
+            QueryUtils.Check(query, searcher);
+        }
 		
 		/// <summary> slop is the total number of positional moves allowed
 		/// to line up a phrase
@@ -192,6 +200,8 @@
 			query.Add(new Term("field", "five"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "two total moves");
+            QueryUtils.Check(query, searcher);
+
 			
 			query = new PhraseQuery();
 			query.SetSlop(5); // it takes six moves to match this phrase
@@ -200,11 +210,14 @@
 			query.Add(new Term("field", "one"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length(), "slop of 5 not close enough");
+            QueryUtils.Check(query, searcher);
+
 			
 			query.SetSlop(6);
 			hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length(), "slop of 6 just right");
-		}
+            QueryUtils.Check(query, searcher);
+        }
 		
 		[Test]
         public virtual void  TestPhraseQueryWithStopAnalyzer()
@@ -225,6 +238,8 @@
 			query.Add(new Term("field", "words"));
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length());
+            QueryUtils.Check(query, searcher);
+
 			
 			// currently StopAnalyzer does not leave "holes", so this matches.
 			query = new PhraseQuery();
@@ -232,6 +247,8 @@
 			query.Add(new Term("field", "here"));
 			hits = searcher.Search(query);
 			Assert.AreEqual(1, hits.Length());
+            QueryUtils.Check(query, searcher);
+
 			
 			searcher.Close();
 		}
@@ -261,6 +278,8 @@
 			phraseQuery.Add(new Term("source", "info"));
 			Hits hits = searcher.Search(phraseQuery);
 			Assert.AreEqual(2, hits.Length());
+            QueryUtils.Check(phraseQuery, searcher);
+
 			
 			TermQuery termQuery = new TermQuery(new Term("contents", "foobar"));
 			BooleanQuery booleanQuery = new BooleanQuery();
@@ -268,6 +287,8 @@
 			booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
 			hits = searcher.Search(booleanQuery);
 			Assert.AreEqual(1, hits.Length());
+            QueryUtils.Check(termQuery, searcher);
+
 			
 			searcher.Close();
 			
@@ -298,6 +319,7 @@
 			Assert.AreEqual(3, hits.Length());
 			hits = searcher.Search(phraseQuery);
 			Assert.AreEqual(2, hits.Length());
+
 			
 			booleanQuery = new BooleanQuery();
 			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
@@ -310,6 +332,8 @@
 			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
 			hits = searcher.Search(booleanQuery);
 			Assert.AreEqual(2, hits.Length());
+            QueryUtils.Check(booleanQuery, searcher);
+
 			
 			searcher.Close();
 			directory.Close();
@@ -351,7 +375,8 @@
 			Assert.AreEqual(1, hits.Id(1));
 			Assert.AreEqual(0.31, hits.Score(2), 0.01);
 			Assert.AreEqual(2, hits.Id(2));
-		}
+            QueryUtils.Check(query, searcher);
+        }
 		
 		[Test]
         public virtual void  TestWrappedPhrase()
@@ -364,6 +389,7 @@
 			
 			Hits hits = searcher.Search(query);
 			Assert.AreEqual(0, hits.Length());
-		}
+            QueryUtils.Check(query, searcher);
+        }
 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPositionIncrement.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs Sat Aug 11 09:56:37 2007
@@ -16,16 +16,18 @@
  */
 
 using System;
+
+using NUnit.Framework;
+
+using Term = Lucene.Net.Index.Term;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using Token = Lucene.Net.Analysis.Token;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Term = Lucene.Net.Index.Term;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPrefixFilter.cs?view=auto&rev=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs Sat Aug 11 09:56:37 2007
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> Tests {@link PrefixFilter} class.
+	/// 
+	/// </summary>
+	/// <author>  Yura Smolsky
+	/// </author>
+	/// <author>  yonik
+	/// </author>
+    [TestFixture]
+    public class TestPrefixFilter
+	{
+        [Test]
+		public virtual void  _TestPrefixFilter()
+		{
+			RAMDirectory directory = new RAMDirectory();
+			
+			System.String[] categories = new System.String[]{"/Computers/Linux", "/Computers/Mac/One", "/Computers/Mac/Two", "/Computers/Windows"};
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			for (int i = 0; i < categories.Length; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
+				writer.AddDocument(doc);
+			}
+			writer.Close();
+			
+			// PrefixFilter combined with ConstantScoreQuery
+			PrefixFilter filter = new PrefixFilter(new Term("category", "/Computers"));
+			Query query = new ConstantScoreQuery(filter);
+			IndexSearcher searcher = new IndexSearcher(directory);
+			Hits hits = searcher.Search(query);
+			Assert.AreEqual(4, hits.Length());
+			
+			// test middle of values
+			filter = new PrefixFilter(new Term("category", "/Computers/Mac"));
+			query = new ConstantScoreQuery(filter);
+			hits = searcher.Search(query);
+			Assert.AreEqual(2, hits.Length());
+			
+			// test start of values
+			filter = new PrefixFilter(new Term("category", "/Computers/Linux"));
+			query = new ConstantScoreQuery(filter);
+			hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length());
+			
+			// test end of values
+			filter = new PrefixFilter(new Term("category", "/Computers/Windows"));
+			query = new ConstantScoreQuery(filter);
+			hits = searcher.Search(query);
+			Assert.AreEqual(1, hits.Length());
+			
+			// test non-existent
+			filter = new PrefixFilter(new Term("category", "/Computers/ObsoleteOS"));
+			query = new ConstantScoreQuery(filter);
+			hits = searcher.Search(query);
+			Assert.AreEqual(0, hits.Length());
+			
+			// test non-existent, before values
+			filter = new PrefixFilter(new Term("category", "/Computers/AAA"));
+			query = new ConstantScoreQuery(filter);
+			hits = searcher.Search(query);
+			Assert.AreEqual(0, hits.Length());
+			
+			// test non-existent, after values
+			filter = new PrefixFilter(new Term("category", "/Computers/ZZZ"));
+			query = new ConstantScoreQuery(filter);
+			hits = searcher.Search(query);
+			Assert.AreEqual(0, hits.Length());
+			
+			// test zero length prefix
+			filter = new PrefixFilter(new Term("category", ""));
+			query = new ConstantScoreQuery(filter);
+			hits = searcher.Search(query);
+			Assert.AreEqual(4, hits.Length());
+			
+			// test non-existent field
+			filter = new PrefixFilter(new Term("nonexistantfield", "/Computers"));
+			query = new ConstantScoreQuery(filter);
+			hits = searcher.Search(query);
+			Assert.AreEqual(0, hits.Length());
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPrefixQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,13 +16,15 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Term = Lucene.Net.Index.Term;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
 namespace Lucene.Net.Search
 {

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestQueryTermVector.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs Sat Aug 11 09:56:37 2007
@@ -16,7 +16,9 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 
 namespace Lucene.Net.Search

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestRangeFilter.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeFilter.cs Sat Aug 11 09:56:37 2007
@@ -16,7 +16,9 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using IndexReader = Lucene.Net.Index.IndexReader;
 using Term = Lucene.Net.Index.Term;
 
@@ -31,16 +33,11 @@
 	/// nor does it adequately test 'negative' results.  It also does not test
 	/// that garbage in results in an Exception.
 	/// </summary>
-	public class TestRangeFilter:BaseTestRangeFilter
+    [TestFixture]
+    public class TestRangeFilter : BaseTestRangeFilter
 	{
-		public TestRangeFilter(System.String name) : base(name)
-		{
-		}
-		
-        public TestRangeFilter() : base()
-		{
-		}
 		
+        [Test]
         public virtual void  TestRangeFilterId()
 		{
 			
@@ -123,6 +120,7 @@
 			Assert.AreEqual(1, result.Length(), "med,med,T,T");
 		}
 		
+        [Test]
         public virtual void  TestRangeFilterRand()
 		{
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestRangeQuery.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestRangeQuery.cs Sat Aug 11 09:56:37 2007
@@ -16,13 +16,15 @@
  */
 
 using System;
+
+using NUnit.Framework;
+
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteSearchable.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestRemoteSearchable.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteSearchable.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestRemoteSearchable.cs Sat Aug 11 09:56:37 2007
@@ -16,13 +16,15 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
+using Term = Lucene.Net.Index.Term;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Term = Lucene.Net.Index.Term;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
 namespace Lucene.Net.Search
 {

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestScorerPerf.cs?view=auto&rev=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs Sat Aug 11 09:56:37 2007
@@ -0,0 +1,427 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Directory = Lucene.Net.Store.Directory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <author>  yonik
+	/// </author>
+	/// <version>  $Id$
+	/// </version>
+    [TestFixture]
+    public class TestScorerPerf
+	{
+		internal System.Random r = new System.Random((System.Int32) 0);
+		internal bool validate = true; // set to false when doing performance testing
+		
+		internal System.Collections.BitArray[] sets;
+		internal IndexSearcher s;
+		
+		public virtual void  CreateDummySearcher()
+		{
+			// Create a dummy index with nothing in it.
+			// This could possibly fail if Lucene starts checking for docid ranges...
+			RAMDirectory rd = new RAMDirectory();
+			IndexWriter iw = new IndexWriter(rd, new WhitespaceAnalyzer(), true);
+			iw.Close();
+			s = new IndexSearcher(rd);
+		}
+		
+		public virtual void  CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
+		{
+			int[] freq = new int[nTerms];
+			for (int i = 0; i < nTerms; i++)
+			{
+				int f = (nTerms + 1) - i; // make first terms less frequent
+				freq[i] = (int) System.Math.Ceiling(System.Math.Pow(f, power));
+			}
+			
+			IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			iw.SetMaxBufferedDocs(123);
+			for (int i = 0; i < nDocs; i++)
+			{
+				Document d = new Document();
+				for (int j = 0; j < nTerms; j++)
+				{
+					if (r.Next(freq[j]) == 0)
+					{
+						d.Add(new Field("f", j.ToString(), Field.Store.NO, Field.Index.UN_TOKENIZED));
+					}
+				}
+				iw.AddDocument(d);
+			}
+			iw.Close();
+		}
+		
+		
+		public virtual System.Collections.BitArray RandBitSet(int sz, int numBitsToSet)
+		{
+			System.Collections.BitArray set_Renamed = new System.Collections.BitArray((sz % 64 == 0 ? sz / 64 : sz / 64 + 1) * 64);
+			for (int i = 0; i < numBitsToSet; i++)
+			{
+                set_Renamed.Set(r.Next(sz), true);
+			}
+			return set_Renamed;
+		}
+		
+		public virtual System.Collections.BitArray[] RandBitSets(int numSets, int setSize)
+		{
+			System.Collections.BitArray[] sets = new System.Collections.BitArray[numSets];
+			for (int i = 0; i < sets.Length; i++)
+			{
+				sets[i] = RandBitSet(setSize, r.Next(setSize));
+			}
+			return sets;
+		}
+		
+		[Serializable]
+		public class BitSetFilter : Filter
+		{
+			public System.Collections.BitArray set_Renamed;
+			public BitSetFilter(System.Collections.BitArray set_Renamed)
+			{
+				this.set_Renamed = set_Renamed;
+			}
+			public override System.Collections.BitArray Bits(IndexReader reader)
+			{
+				return set_Renamed;
+			}
+		}
+		
+		public class CountingHitCollector : HitCollector
+		{
+			virtual public int Count
+			{
+				get
+				{
+					return count;
+				}
+				
+			}
+			virtual public int Sum
+			{
+				get
+				{
+					return sum;
+				}
+				
+			}
+			internal int count = 0;
+			internal int sum = 0;
+			
+			public override void  Collect(int doc, float score)
+			{
+				count++;
+				sum += doc; // use it to avoid any possibility of being optimized away
+			}
+		}
+		
+		
+		public class MatchingHitCollector : CountingHitCollector
+		{
+			internal System.Collections.BitArray answer;
+			internal int pos = - 1;
+			public MatchingHitCollector(System.Collections.BitArray answer)
+			{
+				this.answer = answer;
+			}
+			
+			public override void  Collect(int doc, float score)
+			{
+				pos = SupportClass.Number.NextSetBit(answer, pos + 1);
+				if (pos != doc)
+				{
+					throw new System.SystemException("Expected doc " + pos + " but got " + doc);
+				}
+				base.Collect(doc, score);
+			}
+		}
+		
+		
+		internal virtual System.Collections.BitArray AddClause(BooleanQuery bq, System.Collections.BitArray result)
+		{
+			System.Collections.BitArray rnd = sets[r.Next(sets.Length)];
+			Query q = new ConstantScoreQuery(new BitSetFilter(rnd));
+			bq.Add(q, BooleanClause.Occur.MUST);
+			if (validate)
+			{
+				if (result == null)
+					result = (System.Collections.BitArray) rnd.Clone();
+				else
+				{
+					result.And(rnd);
+				}
+			}
+			return result;
+		}
+		
+		
+		public virtual int DoConjunctions(int iter, int maxClauses)
+		{
+			int ret = 0;
+			
+			for (int i = 0; i < iter; i++)
+			{
+				int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
+				BooleanQuery bq = new BooleanQuery();
+				System.Collections.BitArray result = null;
+				for (int j = 0; j < nClauses; j++)
+				{
+					result = AddClause(bq, result);
+				}
+				
+				CountingHitCollector hc = validate?new MatchingHitCollector(result):new CountingHitCollector();
+				s.Search(bq, hc);
+				ret += hc.Sum;
+				if (validate)
+					Assert.AreEqual(SupportClass.Number.Cardinality(result), hc.Count);
+				// System.out.println(hc.getCount());
+			}
+			
+			return ret;
+		}
+		
+		public virtual int DoNestedConjunctions(int iter, int maxOuterClauses, int maxClauses)
+		{
+			int ret = 0;
+			
+			for (int i = 0; i < iter; i++)
+			{
+				int oClauses = r.Next(maxOuterClauses - 1) + 2;
+				BooleanQuery oq = new BooleanQuery();
+				System.Collections.BitArray result = null;
+				
+				for (int o = 0; o < oClauses; o++)
+				{
+					
+					int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
+					BooleanQuery bq = new BooleanQuery();
+					for (int j = 0; j < nClauses; j++)
+					{
+						result = AddClause(bq, result);
+					}
+					
+					oq.Add(bq, BooleanClause.Occur.MUST);
+				} // outer
+				
+				
+				CountingHitCollector hc = validate ? new MatchingHitCollector(result) : new CountingHitCollector();
+				s.Search(oq, hc);
+				ret += hc.Sum;
+				if (validate)
+					Assert.AreEqual(SupportClass.Number.Cardinality(result), hc.Count);
+				// System.out.println(hc.getCount());
+			}
+			
+			return ret;
+		}
+		
+		
+		public virtual int DoTermConjunctions(IndexSearcher s, int termsInIndex, int maxClauses, int iter)
+		{
+			int ret = 0;
+			
+			for (int i = 0; i < iter; i++)
+			{
+				int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
+				BooleanQuery bq = new BooleanQuery();
+				System.Collections.BitArray terms = new System.Collections.BitArray((termsInIndex % 64 == 0 ? termsInIndex / 64 : termsInIndex / 64 + 1) * 64);
+				for (int j = 0; j < nClauses; j++)
+				{
+					int tnum;
+					// don't pick same clause twice
+					do 
+					{
+						tnum = r.Next(termsInIndex);
+					}
+					while (terms.Get(tnum));
+					Query tq = new TermQuery(new Term("f", tnum.ToString()));
+					bq.Add(tq, BooleanClause.Occur.MUST);
+				}
+				
+				CountingHitCollector hc = new CountingHitCollector();
+				s.Search(bq, hc);
+				ret += hc.Sum;
+			}
+			
+			return ret;
+		}
+		
+		
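+		// Nested variant of DoTermConjunctions: an outer conjunction whose clauses are
+		// themselves conjunctions of distinct TermQuery clauses.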
+		public virtual int DoNestedTermConjunctions(IndexSearcher s, int termsInIndex, int maxOuterClauses, int maxClauses, int iter)
+		{
+			int ret = 0;
+			
+			for (int i = 0; i < iter; i++)
+			{
+				int oClauses = r.Next(maxOuterClauses - 1) + 2;
+				BooleanQuery oq = new BooleanQuery();
+				for (int o = 0; o < oClauses; o++)
+				{
+					
+					int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
+					BooleanQuery bq = new BooleanQuery();
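+					// tracks which terms have already been used in this inner query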
+					System.Collections.BitArray terms = new System.Collections.BitArray((termsInIndex % 64 == 0 ? termsInIndex / 64 : termsInIndex / 64 + 1) * 64);
+					for (int j = 0; j < nClauses; j++)
+					{
+						int tnum;
+						// don't pick the same term twice
+						do
+						{
+							tnum = r.Next(termsInIndex);
+						}
+						while (terms.Get(tnum));
+						terms.Set(tnum, true);
+						Query tq = new TermQuery(new Term("f", tnum.ToString()));
+						bq.Add(tq, BooleanClause.Occur.MUST);
+					} // inner
+					
+					oq.Add(bq, BooleanClause.Occur.MUST);
+				} // outer
+				
+				
+				CountingHitCollector hc = new CountingHitCollector();
+				s.Search(oq, hc);
+				ret += hc.Sum;
+			}
+			
+			return ret;
+		}
+		
+		
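+		// Runs random sloppy PhraseQuery instances built from random terms at
+		// consecutive positions, with the slop fixed at termsInIndex.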
+		public virtual int DoSloppyPhrase(IndexSearcher s, int termsInIndex, int maxClauses, int iter)
+		{
+			int ret = 0;
+			
+			for (int i = 0; i < iter; i++)
+			{
+				int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
+				PhraseQuery q = new PhraseQuery();
+				for (int j = 0; j < nClauses; j++)
+				{
+					int tnum = r.Next(termsInIndex);
+					q.Add(new Term("f", tnum.ToString()), j);
+				}
+				q.SetSlop(termsInIndex); // this could be random too
+				
+				CountingHitCollector hc = new CountingHitCollector();
+				s.Search(q, hc);
+				ret += hc.Sum;
+			}
+			
+			return ret;
+		}
+		
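+		// Functional test: uses many small random bit sets with validation enabled so
+		// every conjunction's hits are checked doc-by-doc against the expected set.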
+		[Test]
+		public virtual void  TestConjunctions()
+		{
+			// test many small sets... the bugs will be found on boundary conditions
+			CreateDummySearcher();
+			validate = true;
+			sets = RandBitSets(1000, 10);
+			DoConjunctions(10000, 5);
+			DoNestedConjunctions(10000, 3, 3);
+			s.Close();
+		}
+		
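+		// The performance tests from the original Java version are preserved below,
+		// disabled, inside a doc comment.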
+		/// <summary> 
+		/// int bigIter=6;
+		/// public void testConjunctionPerf() throws Exception {
+		/// CreateDummySearcher();
+		/// validate=false;
+		/// sets=RandBitSets(32,1000000);
+		/// for (int i=0; i<bigIter; i++) {
+		/// long start = System.currentTimeMillis();
+		/// DoConjunctions(500,6);
+		/// long end = System.currentTimeMillis();
+		/// System.out.println("milliseconds="+(end-start));
+		/// }
+		/// s.close();
+		/// }
+		/// public void testNestedConjunctionPerf() throws Exception {
+		/// CreateDummySearcher();
+		/// validate=false;
+		/// sets=RandBitSets(32,1000000);
+		/// for (int i=0; i<bigIter; i++) {
+		/// long start = System.currentTimeMillis();
+		/// DoNestedConjunctions(500,3,3);
+		/// long end = System.currentTimeMillis();
+		/// System.out.println("milliseconds="+(end-start));
+		/// }
+		/// s.close();
+		/// }
+		/// public void testConjunctionTerms() throws Exception {
+		/// validate=false;
+		/// RAMDirectory dir = new RAMDirectory();
+		/// System.out.println("Creating index");
+		/// CreateRandomTerms(100000,25,2, dir);
+		/// s = new IndexSearcher(dir);
+		/// System.out.println("Starting performance test");
+		/// for (int i=0; i<bigIter; i++) {
+		/// long start = System.currentTimeMillis();
+		/// DoTermConjunctions(s,25,5,10000);
+		/// long end = System.currentTimeMillis();
+		/// System.out.println("milliseconds="+(end-start));
+		/// }
+		/// s.close();
+		/// }
+		/// public void testNestedConjunctionTerms() throws Exception {
+		/// validate=false;    
+		/// RAMDirectory dir = new RAMDirectory();
+		/// System.out.println("Creating index");
+		/// CreateRandomTerms(100000,25,2, dir);
+		/// s = new IndexSearcher(dir);
+		/// System.out.println("Starting performance test");
+		/// for (int i=0; i<bigIter; i++) {
+		/// long start = System.currentTimeMillis();
+		/// DoNestedTermConjunctions(s,25,5,5,1000);
+		/// long end = System.currentTimeMillis();
+		/// System.out.println("milliseconds="+(end-start));
+		/// }
+		/// s.close();
+		/// }
+		/// public void testSloppyPhrasePerf() throws Exception {
+		/// validate=false;    
+		/// RAMDirectory dir = new RAMDirectory();
+		/// System.out.println("Creating index");
+		/// CreateRandomTerms(100000,25,2,dir);
+		/// s = new IndexSearcher(dir);
+		/// System.out.println("Starting performance test");
+		/// for (int i=0; i<bigIter; i++) {
+		/// long start = System.currentTimeMillis();
+		/// DoSloppyPhrase(s,25,2,1000);
+		/// long end = System.currentTimeMillis();
+		/// System.out.println("milliseconds="+(end-start));
+		/// }
+		/// s.close();
+		/// }
+		/// *
+		/// </summary>
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSetNorm.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs Sat Aug 11 09:56:37 2007
@@ -16,10 +16,11 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using Lucene.Net.Documents;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
@@ -73,7 +74,7 @@
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
 			
 			// add the same document four times
-			Field f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
+			Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
 			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
 			d1.Add(f1);
 			writer.AddDocument(d1);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimilarity.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs Sat Aug 11 09:56:37 2007
@@ -16,13 +16,15 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
+using Term = Lucene.Net.Index.Term;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Term = Lucene.Net.Index.Term;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
 namespace Lucene.Net.Search
 {

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimpleExplanations.cs?view=auto&rev=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs Sat Aug 11 09:56:37 2007
@@ -0,0 +1,450 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using Term = Lucene.Net.Index.Term;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using ParseException = Lucene.Net.QueryParsers.ParseException;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> TestExplanations subclass focusing on basic query types</summary>
+    [TestFixture]
+    public class TestSimpleExplanations : TestExplanations
+	{
+		
+		// we focus on queries that don't rewrite to other queries.
+		// if we get those covered well, then the ones that rewrite should
+		// also be covered.
+		
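+		// Qtest(query, docs) runs the query against the base-class test index and
+		// asserts that it matches exactly the given document numbers.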
+		
+		/* simple term tests */
+
+		[Test]
+		public virtual void  TestT1()
+		{
+			Qtest("w1", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestT2()
+		{
+			Qtest("w1^1000", new int[]{0, 1, 2, 3});
+		}
+		
+		/* MatchAllDocs */
+		
+        [Test]
+        public virtual void  TestMA1()
+		{
+			Qtest(new MatchAllDocsQuery(), new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestMA2()
+		{
+			Query q = new MatchAllDocsQuery();
+			q.SetBoost(1000);
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+		
+		/* some simple phrase tests */
+		
+        [Test]
+        public virtual void  TestP1()
+		{
+			Qtest("\"w1 w2\"", new int[]{0});
+		}
+
+        [Test]
+        public virtual void  TestP2()
+		{
+			Qtest("\"w1 w3\"", new int[]{1, 3});
+		}
+
+        [Test]
+        public virtual void  TestP3()
+		{
+			Qtest("\"w1 w2\"~1", new int[]{0, 1, 2});
+		}
+
+        [Test]
+        public virtual void  TestP4()
+		{
+			Qtest("\"w2 w3\"~1", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestP5()
+		{
+			Qtest("\"w3 w2\"~1", new int[]{1, 3});
+		}
+
+        [Test]
+        public virtual void  TestP6()
+		{
+			Qtest("\"w3 w2\"~2", new int[]{0, 1, 3});
+		}
+
+        [Test]
+        public virtual void  TestP7()
+		{
+			Qtest("\"w3 w2\"~3", new int[]{0, 1, 2, 3});
+		}
+		
+		/* some simple filtered query tests */
+		
+        [Test]
+        public virtual void  TestFQ1()
+		{
+			Qtest(new FilteredQuery(qp.Parse("w1"), new ItemizedFilter(new int[]{0, 1, 2, 3})), new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestFQ2()
+		{
+			Qtest(new FilteredQuery(qp.Parse("w1"), new ItemizedFilter(new int[]{0, 2, 3})), new int[]{0, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestFQ3()
+		{
+			Qtest(new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[]{1, 3})), new int[]{3});
+		}
+
+        [Test]
+        public virtual void  TestFQ4()
+		{
+			Qtest(new FilteredQuery(qp.Parse("xx^1000"), new ItemizedFilter(new int[]{1, 3})), new int[]{3});
+		}
+
+        [Test]
+        public virtual void  TestFQ6()
+		{
+			Query q = new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[]{1, 3}));
+			q.SetBoost(1000);
+			Qtest(q, new int[]{3});
+		}
+
+        [Test]
+        public virtual void  TestFQ7()
+		{
+			Query q = new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[]{1, 3}));
+			q.SetBoost(0);
+			Qtest(q, new int[]{3});
+		}
+		
+		/* ConstantScoreQueries */
+		
+        [Test]
+        public virtual void  TestCSQ1()
+		{
+			Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{0, 1, 2, 3}));
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestCSQ2()
+		{
+			Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{1, 3}));
+			Qtest(q, new int[]{1, 3});
+		}
+
+        [Test]
+        public virtual void  TestCSQ3()
+		{
+			Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{0, 2}));
+			q.SetBoost(1000);
+			Qtest(q, new int[]{0, 2});
+		}
+		
+		/* DisjunctionMaxQuery */
+		
+        [Test]
+        public virtual void  TestDMQ1()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
+			q.Add(qp.Parse("w1"));
+			q.Add(qp.Parse("w5"));
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestDMQ2()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+			q.Add(qp.Parse("w1"));
+			q.Add(qp.Parse("w5"));
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestDMQ3()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+			q.Add(qp.Parse("QQ"));
+			q.Add(qp.Parse("w5"));
+			Qtest(q, new int[]{0});
+		}
+
+        [Test]
+        public virtual void  TestDMQ4()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+			q.Add(qp.Parse("QQ"));
+			q.Add(qp.Parse("xx"));
+			Qtest(q, new int[]{2, 3});
+		}
+
+        [Test]
+        public virtual void  TestDMQ5()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+			q.Add(qp.Parse("yy -QQ"));
+			q.Add(qp.Parse("xx"));
+			Qtest(q, new int[]{2, 3});
+		}
+
+        [Test]
+        public virtual void  TestDMQ6()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+			q.Add(qp.Parse("-yy w3"));
+			q.Add(qp.Parse("xx"));
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestDMQ7()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+			q.Add(qp.Parse("-yy w3"));
+			q.Add(qp.Parse("w2"));
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestDMQ8()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+			q.Add(qp.Parse("yy w5^100"));
+			q.Add(qp.Parse("xx^100000"));
+			Qtest(q, new int[]{0, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestDMQ9()
+		{
+			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+			q.Add(qp.Parse("yy w5^100"));
+			q.Add(qp.Parse("xx^0"));
+			Qtest(q, new int[]{0, 2, 3});
+		}
+		
+		/* MultiPhraseQuery */
+		
+        [Test]
+        public virtual void  TestMPQ1()
+		{
+			MultiPhraseQuery q = new MultiPhraseQuery();
+			q.Add(Ta(new System.String[]{"w1"}));
+			q.Add(Ta(new System.String[]{"w2", "w3", "xx"}));
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestMPQ2()
+		{
+			MultiPhraseQuery q = new MultiPhraseQuery();
+			q.Add(Ta(new System.String[]{"w1"}));
+			q.Add(Ta(new System.String[]{"w2", "w3"}));
+			Qtest(q, new int[]{0, 1, 3});
+		}
+
+        [Test]
+        public virtual void  TestMPQ3()
+		{
+			MultiPhraseQuery q = new MultiPhraseQuery();
+			q.Add(Ta(new System.String[]{"w1", "xx"}));
+			q.Add(Ta(new System.String[]{"w2", "w3"}));
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestMPQ4()
+		{
+			MultiPhraseQuery q = new MultiPhraseQuery();
+			q.Add(Ta(new System.String[]{"w1"}));
+			q.Add(Ta(new System.String[]{"w2"}));
+			Qtest(q, new int[]{0});
+		}
+
+        [Test]
+        public virtual void  TestMPQ5()
+		{
+			MultiPhraseQuery q = new MultiPhraseQuery();
+			q.Add(Ta(new System.String[]{"w1"}));
+			q.Add(Ta(new System.String[]{"w2"}));
+			q.SetSlop(1);
+			Qtest(q, new int[]{0, 1, 2});
+		}
+
+        [Test]
+        public virtual void  TestMPQ6()
+		{
+			MultiPhraseQuery q = new MultiPhraseQuery();
+			q.Add(Ta(new System.String[]{"w1", "w3"}));
+			q.Add(Ta(new System.String[]{"w2"}));
+			q.SetSlop(1);
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+		
+		/* some simple tests of boolean queries containing term queries */
+		
+        [Test]
+        public virtual void  TestBQ1()
+		{
+			Qtest("+w1 +w2", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ2()
+		{
+			Qtest("+yy +w3", new int[]{2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ3()
+		{
+			Qtest("yy +w3", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ4()
+		{
+			Qtest("w1 (-xx w2)", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ5()
+		{
+			Qtest("w1 (+qq w2)", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ6()
+		{
+			Qtest("w1 -(-qq w5)", new int[]{1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ7()
+		{
+			Qtest("+w1 +(qq (xx -w2) (+w3 +w4))", new int[]{0});
+		}
+
+        [Test]
+        public virtual void  TestBQ8()
+		{
+			Qtest("+w1 (qq (xx -w2) (+w3 +w4))", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ9()
+		{
+			Qtest("+w1 (qq (-xx w2) -(+w3 +w4))", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ10()
+		{
+			Qtest("+w1 +(qq (-xx w2) -(+w3 +w4))", new int[]{1});
+		}
+
+        [Test]
+        public virtual void  TestBQ11()
+		{
+			Qtest("w1 w2^1000.0", new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ14()
+		{
+			BooleanQuery q = new BooleanQuery(true);
+			q.Add(qp.Parse("QQQQQ"), BooleanClause.Occur.SHOULD);
+			q.Add(qp.Parse("w1"), BooleanClause.Occur.SHOULD);
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ15()
+		{
+			BooleanQuery q = new BooleanQuery(true);
+			q.Add(qp.Parse("QQQQQ"), BooleanClause.Occur.MUST_NOT);
+			q.Add(qp.Parse("w1"), BooleanClause.Occur.SHOULD);
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ16()
+		{
+			BooleanQuery q = new BooleanQuery(true);
+			q.Add(qp.Parse("QQQQQ"), BooleanClause.Occur.SHOULD);
+			q.Add(qp.Parse("w1 -xx"), BooleanClause.Occur.SHOULD);
+			Qtest(q, new int[]{0, 1});
+		}
+
+        [Test]
+        public virtual void  TestBQ17()
+		{
+			BooleanQuery q = new BooleanQuery(true);
+			q.Add(qp.Parse("w2"), BooleanClause.Occur.SHOULD);
+			q.Add(qp.Parse("w1 -xx"), BooleanClause.Occur.SHOULD);
+			Qtest(q, new int[]{0, 1, 2, 3});
+		}
+
+        [Test]
+        public virtual void  TestBQ19()
+		{
+			Qtest("-yy w3", new int[]{0, 1});
+		}
+		
+        [Test]
+        public virtual void  TestBQ20()
+		{
+			BooleanQuery q = new BooleanQuery();
+			q.SetMinimumNumberShouldMatch(2);
+			q.Add(qp.Parse("QQQQQ"), BooleanClause.Occur.SHOULD);
+			q.Add(qp.Parse("yy"), BooleanClause.Occur.SHOULD);
+			q.Add(qp.Parse("zz"), BooleanClause.Occur.SHOULD);
+			q.Add(qp.Parse("w5"), BooleanClause.Occur.SHOULD);
+			q.Add(qp.Parse("w4"), BooleanClause.Occur.SHOULD);
+			
+			Qtest(q, new int[]{0, 3});
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs?view=auto&rev=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs Sat Aug 11 09:56:37 2007
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using Term = Lucene.Net.Index.Term;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using ParseException = Lucene.Net.QueryParsers.ParseException;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> subclass of TestSimpleExplanations that verifies non matches.</summary>
+    [TestFixture]
+    public class TestSimpleExplanationsOfNonMatches : TestSimpleExplanations
+	{
+		
+		/// <summary> Overrides superclass to ignore matches and focus on non-matches
+		/// 
+		/// </summary>
+		/// <seealso cref="CheckHits.CheckNoMatchExplanations">
+		/// </seealso>
+		public override void  Qtest(Query q, int[] expDocNrs)
+		{
+			CheckHits.CheckNoMatchExplanations(q, FIELD, searcher, expDocNrs);
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSort.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSort.cs Sat Aug 11 09:56:37 2007
@@ -16,13 +16,15 @@
  */
 
 using System;
+
+using NUnit.Framework;
+
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Lucene.Net.Index;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Lucene.Net.Index;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using Pattern = System.Text.RegularExpressions.Regex;
-using NUnit.Framework;
 
 namespace Lucene.Net.Search
 {
@@ -316,7 +318,13 @@
 			sort.SetSort("string", true);
 			AssertMatches(full, queryF, sort, "IJZ");
 			
-			sort.SetSort("int");
+            sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en")));
+            AssertMatches(full, queryF, sort, "ZJI");
+			
+            sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en"), true));
+            AssertMatches(full, queryF, sort, "IJZ");
+			
+            sort.SetSort("int");
 			AssertMatches(full, queryF, sort, "IZJ");
 			
 			sort.SetSort("int", true);