Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC

svn commit: r832486 [19/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestSpansAdvanced.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Spans/TestSpansAdvanced.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestSpansAdvanced.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestSpansAdvanced.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,15 +19,14 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Lucene.Net.Search;
-using Searchable = Lucene.Net.Search.Searchable;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search.Spans
@@ -38,10 +37,8 @@
 	/// work correctly in a BooleanQuery.
 	/// 
 	/// </summary>
-	/// <author>  Reece Wilton
-	/// </author>
-	[TestFixture]
-	public class TestSpansAdvanced : LuceneTestCase
+    [TestFixture]
+	public class TestSpansAdvanced:LuceneTestCase
 	{
 		
 		// location to the index
@@ -55,7 +52,7 @@
 		
 		/// <summary> Initializes the tests by adding 4 identical documents to the index.</summary>
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			base.SetUp();
@@ -63,16 +60,16 @@
 			// create test index
 			mDirectory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			AddDocument(writer, "1", "I think it should work.");
-			AddDocument(writer, "2", "I think it should work.");
-			AddDocument(writer, "3", "I think it should work.");
-			AddDocument(writer, "4", "I think it should work.");
+			addDocument(writer, "1", "I think it should work.");
+			addDocument(writer, "2", "I think it should work.");
+			addDocument(writer, "3", "I think it should work.");
+			addDocument(writer, "4", "I think it should work.");
 			writer.Close();
 			searcher = new IndexSearcher(mDirectory);
 		}
 		
 		[TearDown]
-		public override void TearDown()
+		public override void  TearDown()
 		{
 			base.TearDown();
 			searcher.Close();
@@ -90,10 +87,10 @@
 		/// <param name="text">the text of the document
 		/// </param>
 		/// <throws>  IOException </throws>
-		protected internal virtual void  AddDocument(IndexWriter writer, System.String id, System.String text)
+		protected internal virtual void  addDocument(IndexWriter writer, System.String id, System.String text)
 		{
 			
-			Lucene.Net.Documents.Document document = new Lucene.Net.Documents.Document();
+			Document document = new Document();
 			document.Add(new Field(FIELD_ID, id, Field.Store.YES, Field.Index.NOT_ANALYZED));
 			document.Add(new Field(FIELD_TEXT, text, Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(document);
@@ -107,14 +104,14 @@
 		public virtual void  TestBooleanQueryWithSpanQueries()
 		{
 			
-			DoTestBooleanQueryWithSpanQueries(searcher, 0.3884282f);
+			doTestBooleanQueryWithSpanQueries(searcher, 0.3884282f);
 		}
 		
 		/// <summary> Tests two span queries.
 		/// 
 		/// </summary>
 		/// <throws>  IOException </throws>
-		protected internal virtual void  DoTestBooleanQueryWithSpanQueries(IndexSearcher s, float expectedScore)
+		protected internal virtual void  doTestBooleanQueryWithSpanQueries(IndexSearcher s, float expectedScore)
 		{
 			
 			Query spanQuery = new SpanTermQuery(new Term(FIELD_TEXT, "work"));
@@ -123,14 +120,14 @@
 			query.Add(spanQuery, BooleanClause.Occur.MUST);
 			System.String[] expectedIds = new System.String[]{"1", "2", "3", "4"};
 			float[] expectedScores = new float[]{expectedScore, expectedScore, expectedScore, expectedScore};
-			AssertHits(s, query, "two span queries", expectedIds, expectedScores);
+			assertHits(s, query, "two span queries", expectedIds, expectedScores);
 		}
 		
 		
 		/// <summary> Checks to see if the hits are what we expected.
 		/// 
 		/// </summary>
-		/// <param name="hits">the search results
+		/// <param name="query">the query to execute
 		/// </param>
 		/// <param name="description">the description of the search
 		/// </param>
@@ -140,24 +137,23 @@
 		/// 
 		/// </param>
 		/// <throws>  IOException </throws>
-		protected internal virtual void  AssertHits(Searcher s, Query query, System.String description, System.String[] expectedIds, float[] expectedScores)
+		protected internal static void  assertHits(Searcher s, Query query, System.String description, System.String[] expectedIds, float[] expectedScores)
 		{
 			QueryUtils.Check(query, s);
-
+			
 			float tolerance = 1e-5f;
 			
 			// Hits hits = searcher.search(query);
 			// hits normalizes and throws things off if one score is greater than 1.0
 			TopDocs topdocs = s.Search(query, null, 10000);
 			
-			/// <summary>**
-			/// // display the hits
-			/// System.out.println(hits.length() + " hits for search: \"" + description + '\"');
-			/// for (int i = 0; i < hits.length(); i++) {
-			/// System.out.println("  " + FIELD_ID + ':' + hits.doc(i).get(FIELD_ID) + " (score:" + hits.score(i) + ')');
-			/// }
-			/// ***
-			/// </summary>
+			/*****
+			// display the hits
+			System.out.println(hits.length() + " hits for search: \"" + description + '\"');
+			for (int i = 0; i < hits.length(); i++) {
+			System.out.println("  " + FIELD_ID + ':' + hits.doc(i).get(FIELD_ID) + " (score:" + hits.score(i) + ')');
+			}
+			*****/
 			
 			// did we get the hits we expected
 			Assert.AreEqual(expectedIds.Length, topdocs.totalHits);
@@ -168,7 +164,7 @@
 				
 				int id = topdocs.scoreDocs[i].doc;
 				float score = topdocs.scoreDocs[i].score;
-				Lucene.Net.Documents.Document doc = s.Doc(id);
+				Document doc = s.Doc(id);
 				Assert.AreEqual(expectedIds[i], doc.Get(FIELD_ID));
 				bool scoreEq = System.Math.Abs(expectedScores[i] - score) < tolerance;
 				if (!scoreEq)

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestSpansAdvanced2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Spans/TestSpansAdvanced2.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestSpansAdvanced2.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestSpansAdvanced2.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,11 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Lucene.Net.Search;
-using Searchable = Lucene.Net.Search.Searchable;
 
 namespace Lucene.Net.Search.Spans
 {
@@ -34,24 +33,22 @@
 	/// functionality.
 	/// 
 	/// </summary>
-	/// <author>  Reece Wilton
-	/// </author>
-	[TestFixture]
+    [TestFixture]
 	public class TestSpansAdvanced2:TestSpansAdvanced
 	{
 		internal IndexSearcher searcher2;
 		/// <summary> Initializes the tests by adding documents to the index.</summary>
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			
 			// create test index
 			IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
-			AddDocument(writer, "A", "Should we, could we, would we?");
-			AddDocument(writer, "B", "it should.  Should it?");
-			AddDocument(writer, "C", "it shouldn't.");
-			AddDocument(writer, "D", "Should we, should we, should we.");
+			addDocument(writer, "A", "Should we, could we, would we?");
+			addDocument(writer, "B", "It should.  Should it?");
+			addDocument(writer, "C", "It shouldn't.");
+			addDocument(writer, "D", "Should we, should we, should we.");
 			writer.Close();
 			
 			// re-open the searcher since we added more docs
@@ -81,7 +78,7 @@
 			Query spanQuery = new SpanTermQuery(new Term(FIELD_TEXT, "should"));
 			System.String[] expectedIds = new System.String[]{"B", "D", "1", "2", "3", "4", "A"};
 			float[] expectedScores = new float[]{0.625f, 0.45927936f, 0.35355338f, 0.35355338f, 0.35355338f, 0.35355338f, 0.26516503f};
-			AssertHits(searcher2, spanQuery, "single span query", expectedIds, expectedScores);
+			assertHits(searcher2, spanQuery, "single span query", expectedIds, expectedScores);
 		}
 		
 		/// <summary> Tests a single span query that matches multiple documents.
@@ -99,9 +96,9 @@
 			query.Add(spanQuery2, BooleanClause.Occur.MUST);
 			System.String[] expectedIds = new System.String[]{"D", "A"};
 			// these values were pre LUCENE-413
-			// float[] expectedScores = new float[]{0.93163157f, 0.20698164f};
+			// final float[] expectedScores = new float[] { 0.93163157f, 0.20698164f };
 			float[] expectedScores = new float[]{1.0191123f, 0.93163157f};
-			AssertHits(searcher2, query, "multiple different span queries", expectedIds, expectedScores);
+			assertHits(searcher2, query, "multiple different span queries", expectedIds, expectedScores);
 		}
 		
 		/// <summary> Tests two span queries.
@@ -112,7 +109,7 @@
 		public override void  TestBooleanQueryWithSpanQueries()
 		{
 			
-			DoTestBooleanQueryWithSpanQueries(searcher2, 0.73500174f);
+			doTestBooleanQueryWithSpanQueries(searcher2, 0.73500174f);
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestBoolean2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestBoolean2.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestBoolean2.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestBoolean2.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,6 +19,7 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
@@ -26,18 +27,16 @@
 using ParseException = Lucene.Net.QueryParsers.ParseException;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
-	
 	/// <summary>Test BooleanQuery2 against BooleanQuery by overriding the standard query parser.
 	/// This also tests the scoring order of BooleanQuery.
 	/// </summary>
-	[TestFixture]
-	public class TestBoolean2 : LuceneTestCase
+    [TestFixture]
+	public class TestBoolean2:LuceneTestCase
 	{
 		[Serializable]
 		private class AnonymousClassDefaultSimilarity:DefaultSimilarity
@@ -69,14 +68,14 @@
 		public const System.String field = "field";
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			RAMDirectory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			for (int i = 0; i < docFields.Length; i++)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				doc.Add(new Field(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
@@ -88,7 +87,7 @@
 		
 		public virtual Query MakeQuery(System.String queryText)
 		{
-			Query q = (new Lucene.Net.QueryParsers.QueryParser(field, new WhitespaceAnalyzer())).Parse(queryText);
+			Query q = (new QueryParser(field, new WhitespaceAnalyzer())).Parse(queryText);
 			return q;
 		}
 		
@@ -104,7 +103,7 @@
 				
 				Query query2 = MakeQuery(queryText); // there should be no need to parse again...
 				BooleanQuery.SetAllowDocsOutOfOrder(false);
-                ScoreDoc[] hits2 = searcher.Search(query2, null, 1000).scoreDocs;
+				ScoreDoc[] hits2 = searcher.Search(query2, null, 1000).scoreDocs;
 				
 				CheckHits.CheckHitsQuery(query2, hits1, hits2, expDocNrs);
 			}
@@ -199,12 +198,13 @@
 		[Test]
 		public virtual void  TestRandomQueries()
 		{
-			System.Random rnd = new System.Random((System.Int32) 0);
+			System.Random rnd = NewRandom();
 			
 			System.String[] vals = new System.String[]{"w1", "w2", "w3", "w4", "w5", "xx", "yy", "zzz"};
 			
 			int tot = 0;
 			
+			BooleanQuery q1 = null;
 			try
 			{
 				
@@ -212,7 +212,7 @@
 				for (int i = 0; i < 1000; i++)
 				{
 					int level = rnd.Next(3);
-					BooleanQuery q1 = RandBoolQuery(new System.Random((System.Int32) i), level, field, vals, null);
+					q1 = RandBoolQuery(new System.Random((System.Int32) rnd.Next(System.Int32.MaxValue)), level, field, vals, null);
 					
 					// Can't sort by relevance since floating point numbers may not quite
 					// match up.
@@ -221,15 +221,21 @@
 					BooleanQuery.SetAllowDocsOutOfOrder(false);
 					
 					QueryUtils.Check(q1, searcher);
-
-                    ScoreDoc[] hits1 = searcher.Search(q1, null, 1000, sort).scoreDocs;
+					
+					ScoreDoc[] hits1 = searcher.Search(q1, null, 1000, sort).scoreDocs;
 					
 					BooleanQuery.SetAllowDocsOutOfOrder(true);
-                    ScoreDoc[] hits2 = searcher.Search(q1, null, 1000, sort).scoreDocs;
+					ScoreDoc[] hits2 = searcher.Search(q1, null, 1000, sort).scoreDocs;
 					tot += hits2.Length;
 					CheckHits.CheckEqual(q1, hits1, hits2);
 				}
 			}
+			catch (System.Exception e)
+			{
+				// For easier debugging
+				System.Console.Out.WriteLine("failed query: " + q1);
+				throw e;
+			}
 			finally
 			{
 				// even when a test fails.
@@ -278,7 +284,6 @@
 			}
 			if (cb != null)
 				cb.PostCreate(current);
-			((System.Collections.ArrayList)current.Clauses()).TrimToSize();
 			return current;
 		}
 	}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanMinShouldMatch.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestBooleanMinShouldMatch.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanMinShouldMatch.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanMinShouldMatch.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,6 +19,7 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -26,16 +27,14 @@
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
-	
 	/// <summary>Test that BooleanQuery.setMinimumNumberShouldMatch works.</summary>
-	[TestFixture]
-	public class TestBooleanMinShouldMatch : LuceneTestCase
+    [TestFixture]
+	public class TestBooleanMinShouldMatch:LuceneTestCase
 	{
 		private class AnonymousClassCallback : TestBoolean2.Callback
 		{
@@ -48,10 +47,8 @@
 				this.rnd = rnd;
 				this.enclosingInstance = enclosingInstance;
 			}
-
 			private System.Random rnd;
 			private TestBooleanMinShouldMatch enclosingInstance;
-
 			public TestBooleanMinShouldMatch Enclosing_Instance
 			{
 				get
@@ -79,10 +76,8 @@
 		public IndexSearcher s;
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
-			
-			
 			base.SetUp();
 			
 			
@@ -93,7 +88,7 @@
 			
 			for (int i = 0; i < data.Length; i++)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				doc.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id",String.valueOf(i)));
 				doc.Add(new Field("all", "all", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("all","all"));
 				if (null != data[i])
@@ -117,7 +112,7 @@
 			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			if (expected != h.Length)
 			{
-				PrintHits("TestBooleanMinShouldMatch", h, s);  // PrintHits(TestCase.GetName(), h);    // {{Aroush-1.9}} 'GetName()' gives us the name of the test in JUnit, how is it done in NUnit?
+				PrintHits("getName()", h, s);  // {{Aroush-2.9}} String junit.framework.TestCase.getName()
 			}
 			Assert.AreEqual(expected, h.Length, "result count");
 			QueryUtils.Check(q, s);
@@ -342,24 +337,24 @@
 			
 			VerifyNrHits(q, 0);
 		}
-
-        [Test]
-        public virtual void TestNoOptionalButMin2()
-        {
-
-            /* one required, no optional */
-            BooleanQuery q = new BooleanQuery();
-            q.Add(new TermQuery(new Term("all", "all")), BooleanClause.Occur.MUST); //true,  false);
-
-            q.SetMinimumNumberShouldMatch(1); // 1 of 0 optional 
-
-            VerifyNrHits(q, 0);
-        }
-
-        [Test]
+		
+		[Test]
+		public virtual void  TestNoOptionalButMin2()
+		{
+			
+			/* one required, no optional */
+			BooleanQuery q = new BooleanQuery();
+			q.Add(new TermQuery(new Term("all", "all")), BooleanClause.Occur.MUST); //true,  false);
+			
+			q.SetMinimumNumberShouldMatch(1); // 1 of 0 optional 
+			
+			VerifyNrHits(q, 0);
+		}
+		
+		[Test]
 		public virtual void  TestRandomQueries()
 		{
-			System.Random rnd = new System.Random((System.Int32) 0);
+			System.Random rnd = NewRandom();
 			
 			System.String field = "data";
 			System.String[] vals = new System.String[]{"1", "2", "3", "4", "5", "6", "A", "Z", "B", "Y", "Z", "X", "foo"};
@@ -369,14 +364,15 @@
 			TestBoolean2.Callback minNrCB = new AnonymousClassCallback(rnd, this);
 			
 			
-
+			
 			// increase number of iterations for more complete testing      
 			for (int i = 0; i < 1000; i++)
 			{
 				int lev = rnd.Next(maxLev);
-				BooleanQuery q1 = TestBoolean2.RandBoolQuery(new System.Random((System.Int32) i), lev, field, vals, null);
-				// BooleanQuery q2 = TestBoolean2.randBoolQuery(new Random(i), lev, field, vals, minNrCB);
-				BooleanQuery q2 = TestBoolean2.RandBoolQuery(new System.Random((System.Int32) i), lev, field, vals, null);
+				long seed = rnd.Next(System.Int32.MaxValue);
+				BooleanQuery q1 = TestBoolean2.RandBoolQuery(new System.Random((System.Int32) seed), lev, field, vals, null);
+				// BooleanQuery q2 = TestBoolean2.randBoolQuery(new Random(seed), lev, field, vals, minNrCB);
+				BooleanQuery q2 = TestBoolean2.RandBoolQuery(new System.Random((System.Int32) seed), lev, field, vals, null);
 				// only set minimumNumberShouldMatch on the top level query since setting
 				// at a lower level can change the score.
 				minNrCB.PostCreate(q2);
@@ -394,6 +390,7 @@
 				// should be a superset to the unconstrained query.
 				if (top2.totalHits > top1.totalHits)
 				{
+					//TestCase.fail("Constrained results not a subset:\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q2.ToString());
 					Assert.Fail("Constrained results not a subset:\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q2.ToString());
 				}
 				
@@ -412,6 +409,7 @@
 							// check if scores match
 							if (System.Math.Abs(otherScore - score) > 1.0e-6f)
 							{
+								//TestCase.fail("Doc " + id + " scores don't match\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q2.ToString());
 								Assert.Fail("Doc " + id + " scores don't match\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q2.ToString());
 							}
 						}
@@ -420,6 +418,7 @@
 					// check if subset
 					if (!found)
 					{
+						//TestCase.fail("Doc " + id + " not found\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q2.ToString());
 						Assert.Fail("Doc " + id + " not found\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q2.ToString());
 					}
 				}
@@ -436,8 +435,8 @@
 			
 			for (int i = 0; i < h.Length; i++)
 			{
-				Lucene.Net.Documents.Document d = searcher.Doc(h[i].doc);
-                float score = h[i].score;
+				Document d = searcher.Doc(h[i].doc);
+				float score = h[i].score;
 				System.Console.Error.WriteLine("#" + i + ": {0.000000}" + score + " - " + d.Get("id") + " - " + d.Get("data"));
 			}
 		}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanOr.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestBooleanOr.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanOr.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanOr.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,22 +19,19 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
-	/// <summary> Created on 2005. 2. 9.
-	/// <br>Adapted to Lucene testcase by Paul Elschot.
-	/// </summary>
-	[TestFixture]
-	public class TestBooleanOr : LuceneTestCase
+    [TestFixture]
+	public class TestBooleanOr:LuceneTestCase
 	{
 		
 		private static System.String FIELD_T = "T";
@@ -50,8 +47,8 @@
 		private int Search(Query q)
 		{
 			QueryUtils.Check(q, searcher);
-            return searcher.Search(q, null, 1000).totalHits;
-        }
+			return searcher.Search(q, null, 1000).totalHits;
+		}
 		
 		[Test]
 		public virtual void  TestElements()
@@ -139,11 +136,11 @@
 		}
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
-			//base.SetUp();
 			base.SetUp();
-
+			base.SetUp();
+			
 			//
 			RAMDirectory rd = new RAMDirectory();
 			
@@ -151,7 +148,7 @@
 			IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			//
-			Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
+			Document d = new Document();
 			d.Add(new Field(FIELD_T, "Optimize not deleting all files", Field.Store.YES, Field.Index.ANALYZED));
 			d.Add(new Field(FIELD_C, "Deleted When I run an optimize in our production environment.", Field.Store.YES, Field.Index.ANALYZED));
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanPrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestBooleanPrefixQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanPrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanPrefixQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,40 +19,63 @@
 
 using NUnit.Framework;
 
-//using TestRunner = junit.textui.TestRunner;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> </summary>
-	/// <version>  $Id: TestBooleanPrefixQuery.java 583534 2007-10-10 16:46:35Z mikemccand $
+	/// <version>  $Id: TestBooleanPrefixQuery.java 808519 2009-08-27 16:57:27Z mikemccand $
 	/// 
 	/// </version>
 	
-	[TestFixture]
-	public class TestBooleanPrefixQuery : LuceneTestCase
+    [TestFixture]
+	public class TestBooleanPrefixQuery:LuceneTestCase
 	{
 		
 		[STAThread]
 		public static void  Main(System.String[] args)
 		{
-			// NUnit.Core.TestRunner.Run(Suite());  // {{Aroush}} where is 'TestRunner' in NUnit?
+			// TestRunner.run(suite()); // {{Aroush-2.9}} how is this done in NUnit?
+		}
+		
+		/*public static Test suite()
+		{
+			return new TestSuite(typeof(TestBooleanPrefixQuery));
+		}*/
+		
+		public TestBooleanPrefixQuery(System.String name):base(name)
+		{
 		}
 		
-		/*  // {{Aroush}} Do we need this method?
-		public static TestCase Suite()
+		private int GetCount(IndexReader r, Query q)
 		{
-			return new NUnit.Core.TestSuite(typeof(TestBooleanPrefixQuery));
+			if (q is BooleanQuery)
+			{
+				return ((BooleanQuery) q).GetClauses().Length;
+			}
+			else if (q is ConstantScoreQuery)
+			{
+				DocIdSetIterator iter = ((ConstantScoreQuery) q).GetFilter().GetDocIdSet(r).Iterator();
+				int count = 0;
+				while (iter.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+				{
+					count++;
+				}
+				return count;
+			}
+			else
+			{
+				throw new System.SystemException("unepxected query " + q);
+			}
 		}
-		*/
 		
 		[Test]
 		public virtual void  TestMethod()
@@ -63,20 +86,20 @@
 			
 			Query rw1 = null;
 			Query rw2 = null;
+			IndexReader reader = null;
 			try
 			{
 				IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 				for (int i = 0; i < categories.Length; i++)
 				{
-					Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+					Document doc = new Document();
 					doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
 					writer.AddDocument(doc);
 				}
 				writer.Close();
 				
-				IndexReader reader = IndexReader.Open(directory);
+				reader = IndexReader.Open(directory);
 				PrefixQuery query = new PrefixQuery(new Term("category", "foo"));
-				
 				rw1 = query.Rewrite(reader);
 				
 				BooleanQuery bq = new BooleanQuery();
@@ -89,23 +112,7 @@
 				Assert.Fail(e.Message);
 			}
 			
-			BooleanQuery bq1 = null;
-			if (rw1 is BooleanQuery)
-			{
-				bq1 = (BooleanQuery) rw1;
-			}
-			
-			BooleanQuery bq2 = null;
-			if (rw2 is BooleanQuery)
-			{
-				bq2 = (BooleanQuery) rw2;
-			}
-			else
-			{
-				Assert.Fail("Rewrite");
-			}
-			
-			Assert.AreEqual(bq1.GetClauses().Length, bq2.GetClauses().Length, "Number of Clauses Mismatch");
+			Assert.AreEqual(GetCount(reader, rw1), GetCount(reader, rw2), "Number of Clauses Mismatch");
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestBooleanQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,13 +19,21 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
-	[TestFixture]
-	public class TestBooleanQuery : LuceneTestCase
+	
+    [TestFixture]
+	public class TestBooleanQuery:LuceneTestCase
 	{
 		
 		[Test]
@@ -58,10 +66,47 @@
 				BooleanQuery.SetMaxClauseCount(0);
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// okay
 			}
 		}
+		
+		// LUCENE-1630
+		[Test]
+		public virtual void  TestNullOrSubScorer()
+		{
+			Directory dir = new MockRAMDirectory();
+			IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+			Document doc = new Document();
+			doc.Add(new Field("field", "a b c d", Field.Store.NO, Field.Index.ANALYZED));
+			w.AddDocument(doc);
+			IndexReader r = w.GetReader();
+			IndexSearcher s = new IndexSearcher(r);
+			BooleanQuery q = new BooleanQuery();
+			q.Add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
+			
+			// PhraseQuery w/ no terms added returns a null scorer
+			PhraseQuery pq = new PhraseQuery();
+			q.Add(pq, BooleanClause.Occur.SHOULD);
+			Assert.AreEqual(1, s.Search(q, 10).totalHits);
+			
+			// A required clause which returns null scorer should return null scorer to
+			// IndexSearcher.
+			q = new BooleanQuery();
+			pq = new PhraseQuery();
+			q.Add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
+			q.Add(pq, BooleanClause.Occur.MUST);
+			Assert.AreEqual(0, s.Search(q, 10).totalHits);
+			
+			DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(1.0f);
+			dmq.Add(new TermQuery(new Term("field", "a")));
+			dmq.Add(pq);
+			Assert.AreEqual(1, s.Search(dmq, 10).totalHits);
+			
+			r.Close();
+			w.Close();
+			dir.Close();
+		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanScorer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestBooleanScorer.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanScorer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestBooleanScorer.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,27 +19,89 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> </summary>
-	/// <version>  $rcs = ' $Id: TestBooleanScorer.java 583534 2007-10-10 16:46:35Z mikemccand $ ' ;
+	/// <version>  $rcs = ' $Id: TestBooleanScorer.java 782410 2009-06-07 16:58:41Z mikemccand $ ' ;
 	/// </version>
-	[TestFixture]
-	public class TestBooleanScorer : LuceneTestCase
+    [TestFixture]
+	public class TestBooleanScorer:LuceneTestCase
 	{
+		private class AnonymousClassScorer:Scorer
+		{
+			private void  InitBlock(TestBooleanScorer enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestBooleanScorer enclosingInstance;
+			public TestBooleanScorer Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal AnonymousClassScorer(TestBooleanScorer enclosingInstance, Lucene.Net.Search.Similarity Param1):base(Param1)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private int doc = - 1;
+			public override Explanation Explain(int doc)
+			{
+				return null;
+			}
+			public override float Score()
+			{
+				return 0;
+			}
+			/// <deprecated> delete in 3.0. 
+			/// </deprecated>
+			public override int Doc()
+			{
+				return 3000;
+			}
+			public override int DocID()
+			{
+				return doc;
+			}
+			/// <deprecated> delete in 3.0 
+			/// </deprecated>
+			public override bool Next()
+			{
+				return NextDoc() != NO_MORE_DOCS;
+			}
+			
+			public override int NextDoc()
+			{
+				return doc = doc == - 1?3000:NO_MORE_DOCS;
+			}
+			
+			/// <deprecated> delete in 3.0 
+			/// </deprecated>
+			public override bool SkipTo(int target)
+			{
+				return Advance(target) != NO_MORE_DOCS;
+			}
+			
+			public override int Advance(int target)
+			{
+				return doc = target <= 3000?3000:NO_MORE_DOCS;
+			}
+		}
 		
-		//public TestBooleanScorer(System.String name) : base(name)
-		//{
-		//}
+		public TestBooleanScorer(System.String name):base(name)
+		{
+		}
 		
 		private const System.String FIELD = "category";
 		
@@ -55,18 +117,18 @@
 				IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 				for (int i = 0; i < values.Length; i++)
 				{
-					Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+					Document doc = new Document();
 					doc.Add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
 					writer.AddDocument(doc);
 				}
 				writer.Close();
 				
-				BooleanQuery boolQuery1 = new BooleanQuery();
-				boolQuery1.Add(new TermQuery(new Term(FIELD, "1")), BooleanClause.Occur.SHOULD);
-				boolQuery1.Add(new TermQuery(new Term(FIELD, "2")), BooleanClause.Occur.SHOULD);
+				BooleanQuery booleanQuery1 = new BooleanQuery();
+				booleanQuery1.Add(new TermQuery(new Term(FIELD, "1")), BooleanClause.Occur.SHOULD);
+				booleanQuery1.Add(new TermQuery(new Term(FIELD, "2")), BooleanClause.Occur.SHOULD);
 				
 				BooleanQuery query = new BooleanQuery();
-				query.Add(boolQuery1, BooleanClause.Occur.MUST);
+				query.Add(booleanQuery1, BooleanClause.Occur.MUST);
 				query.Add(new TermQuery(new Term(FIELD, "9")), BooleanClause.Occur.MUST_NOT);
 				
 				IndexSearcher indexSearcher = new IndexSearcher(directory);
@@ -78,5 +140,21 @@
 				Assert.Fail(e.Message);
 			}
 		}
+		
+		[Test]
+		public virtual void  TestEmptyBucketWithMoreDocs()
+		{
+			// This test checks the logic of nextDoc() when all sub scorers have docs
+			// beyond the first bucket (for example). Currently, the code relies on the
+			// 'more' variable to work properly, and this test ensures that if the logic
+			// changes, we have a test to back it up.
+			
+			Similarity sim = Similarity.GetDefault();
+			Scorer[] scorers = new Scorer[]{new AnonymousClassScorer(this, sim)};
+			BooleanScorer bs = new BooleanScorer(sim, 1, new System.Collections.ArrayList(scorers), null);
+			
+			Assert.AreEqual(3000, bs.NextDoc(), "should have received 3000");
+			Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, bs.NextDoc(), "should have received NO_MORE_DOCS");
+		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestCachingWrapperFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestCachingWrapperFilter.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestCachingWrapperFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestCachingWrapperFilter.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,17 +19,18 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
-	[TestFixture]
-	public class TestCachingWrapperFilter : LuceneTestCase
+	
+    [TestFixture]
+	public class TestCachingWrapperFilter:LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestCachingWorks()
@@ -47,9 +48,13 @@
 			cacher.GetDocIdSet(reader);
 			Assert.IsTrue(filter.WasCalled(), "first time");
 			
+			// make sure no exception if cache is holding the wrong bitset
+			cacher.Bits(reader);
+			cacher.GetDocIdSet(reader);
+			
 			// second time, nested filter should not be called
 			filter.Clear();
-            cacher.GetDocIdSet(reader);
+			cacher.GetDocIdSet(reader);
 			Assert.IsFalse(filter.WasCalled(), "second time");
 			
 			reader.Close();

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestComplexExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestComplexExplanations.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestComplexExplanations.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestComplexExplanations.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -29,11 +29,11 @@
 	/// on the assumption that if the explanations work out right for them,
 	/// they should work for anything.
 	/// </summary>
-	[TestFixture]
-	public class TestComplexExplanations : TestExplanations
+    [TestFixture]
+	public class TestComplexExplanations:TestExplanations
 	{
 		[Serializable]
-		private class AnonymousClassDefaultSimilarity : DefaultSimilarity
+		private class AnonymousClassDefaultSimilarity:DefaultSimilarity
 		{
 			public override float QueryNorm(float sumOfSquaredWeights)
 			{
@@ -48,11 +48,11 @@
 		public override void  SetUp()
 		{
 			base.SetUp();
-			searcher.SetSimilarity(new AnonymousClassDefaultSimilarity());
+			searcher.SetSimilarity(createQnorm1Similarity());
 		}
 		
 		// must be static for weight serialization tests 
-		private static DefaultSimilarity CreateQnorm1Similarity()
+		private static DefaultSimilarity createQnorm1Similarity()
 		{
 			return new AnonymousClassDefaultSimilarity();
 		}
@@ -209,27 +209,23 @@
 			// NOTE: using qtest not bqtest
 			Qtest("w1 w2^0.0", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ13()
 		{
 			// NOTE: using qtest not bqtest
 			Qtest("w1 -w5^0.0", new int[]{1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ18()
 		{
 			// NOTE: using qtest not bqtest
 			Qtest("+w1^0.0 w2", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ21()
 		{
 			Bqtest("(+w1 w2)^0.0", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ22()
 		{
@@ -243,7 +239,6 @@
 			q.SetBoost(0);
 			Bqtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestST6()
 		{
@@ -259,7 +254,6 @@
 			q.SetBoost(0);
 			Bqtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestSF7()
 		{
@@ -275,7 +269,6 @@
 			q.SetBoost(0);
 			Bqtest(q, new int[]{0, 1, 2, 3});
 		}
-		
 		[Test]
 		public virtual void  TestSNot6()
 		{
@@ -293,7 +286,6 @@
 			SpanQuery q = Snot(f, St("xx"));
 			Qtest(q, new int[]{0, 1, 3});
 		}
-
 		[Test]
 		public virtual void  TestSNot9()
 		{

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestComplexExplanationsOfNonMatches.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestComplexExplanationsOfNonMatches.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestComplexExplanationsOfNonMatches.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestComplexExplanationsOfNonMatches.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,22 +19,21 @@
 
 using NUnit.Framework;
 
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using IndexReader = Lucene.Net.Index.IndexReader;
-using Term = Lucene.Net.Index.Term;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
 using ParseException = Lucene.Net.QueryParsers.ParseException;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> subclass of TestSimpleExplanations that verifies non matches.</summary>
-	[TestFixture]
-	public class TestComplexExplanationsOfNonMatches : TestComplexExplanations
+	public class TestComplexExplanationsOfNonMatches:TestComplexExplanations
 	{
 		
 		/// <summary> Overrides superclass to ignore matches and focus on non-matches

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestCustomSearcherSort.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestCustomSearcherSort.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -28,6 +28,7 @@
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -35,61 +36,68 @@
 	/// <summary> Unit test for sorting code.
 	/// 
 	/// </summary>
+	
 	[Serializable]
-	[TestFixture]
-	public class TestCustomSearcherSort
+    [TestFixture]
+	public class TestCustomSearcherSort:LuceneTestCase
 	{
-		private Directory Index
-		{
-			// create an index for testing
-			
-			get
-			{
-				RAMDirectory indexStore = new RAMDirectory();
-				IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-				RandomGen random = new RandomGen(this);
-				for (int i = 0; i < INDEX_SIZE; ++i)
-				{
-					// don't decrease; if to low the problem doesn't show up
-					Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-					if ((i % 5) != 0)
-					{
-						// some documents must not have an entry in the first sort field
-						doc.Add(new Field("publicationDate_", random.GetLuceneDate(), Field.Store.YES, Field.Index.NOT_ANALYZED));
-					}
-					if ((i % 7) == 0)
-					{
-						// some documents to match the query (see below) 
-						doc.Add(new Field("content", "test", Field.Store.YES, Field.Index.ANALYZED));
-					}
-					// every document has a defined 'mandant' field
-					doc.Add(new Field("mandant", System.Convert.ToString(i % 3), Field.Store.YES, Field.Index.NOT_ANALYZED));
-					writer.AddDocument(doc);
-				}
-				writer.Optimize();
-				writer.Close();
-				return indexStore;
-			}
-			
-		}
 		
 		private Directory index = null;
 		private Query query = null;
 		// reduced from 20000 to 2000 to speed up test...
 		private const int INDEX_SIZE = 2000;
 		
+		public TestCustomSearcherSort(System.String name):base(name)
+		{
+		}
 		
 		[STAThread]
 		public static void  Main(System.String[] argv)
 		{
-			// TestRunner.run(Suite()); // {{Aroush}} how is this done in NUnit?
+			// TestRunner.run(suite()); // {{Aroush-2.9}} how is this done in NUnit?
+		}
+		
+		/*public static Test suite()
+		{
+			return new TestSuite(typeof(TestCustomSearcherSort));
+		}*/
+		
+		
+		// create an index for testing
+		private Directory GetIndex()
+		{
+			RAMDirectory indexStore = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			RandomGen random = new RandomGen(this, NewRandom());
+			for (int i = 0; i < INDEX_SIZE; ++i)
+			{
+				// don't decrease; if to low the problem doesn't show up
+				Document doc = new Document();
+				if ((i % 5) != 0)
+				{
+					// some documents must not have an entry in the first sort field
+					doc.Add(new Field("publicationDate_", random.GetLuceneDate(), Field.Store.YES, Field.Index.NOT_ANALYZED));
+				}
+				if ((i % 7) == 0)
+				{
+					// some documents to match the query (see below) 
+					doc.Add(new Field("content", "test", Field.Store.YES, Field.Index.ANALYZED));
+				}
+				// every document has a defined 'mandant' field
+				doc.Add(new Field("mandant", System.Convert.ToString(i % 3), Field.Store.YES, Field.Index.NOT_ANALYZED));
+				writer.AddDocument(doc);
+			}
+			writer.Optimize();
+			writer.Close();
+			return indexStore;
 		}
 		
 		/// <summary> Create index and query for test cases. </summary>
 		[SetUp]
-		public virtual void  SetUp()
+		public override void  SetUp()
 		{
-			index = Index;
+			base.SetUp();
+			index = GetIndex();
 			query = new TermQuery(new Term("content", "test"));
 		}
 		
@@ -111,7 +119,7 @@
 			// log("Run testFieldSortSingleSearcher");
 			// define the sort criteria
 			Sort custSort = new Sort(new SortField[]{new SortField("publicationDate_"), SortField.FIELD_SCORE});
-			Searcher searcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{new CustomSearcher(this, index, 2)});
+			Searcher searcher = new MultiSearcher(new Searcher[]{new CustomSearcher(this, index, 2)});
 			// search and check hits
 			MatchHits(searcher, custSort);
 		}
@@ -122,7 +130,7 @@
 			// log("Run testFieldSortMultiCustomSearcher");
 			// define the sort criteria
 			Sort custSort = new Sort(new SortField[]{new SortField("publicationDate_"), SortField.FIELD_SCORE});
-			Searcher searcher = new MultiSearcher(new Lucene.Net.Search.Searchable[]{new CustomSearcher(this, index, 0), new CustomSearcher(this, index, 2)});
+			Searcher searcher = new MultiSearcher(new Searchable[]{new CustomSearcher(this, index, 0), new CustomSearcher(this, index, 2)});
 			// search and check hits
 			MatchHits(searcher, custSort);
 		}
@@ -145,7 +153,6 @@
 			ScoreDoc[] resultSort = searcher.Search(query, null, 1000, sort).scoreDocs;
 			CheckHits(resultSort, "Sort by custom criteria: "); // check for duplicates
 			
-			System.String lf = SupportClass.AppSettings.Get("line.separator", "\n");
 			// besides the sorting both sets of hits must be identical
 			for (int hitid = 0; hitid < resultSort.Length; ++hitid)
 			{
@@ -181,23 +188,24 @@
 				for (int docnum = 0; docnum < hits.Length; ++docnum)
 				{
 					System.Int32 luceneId;
-                    luceneId = (System.Int32)hits[docnum].doc;
-                    if (idMap.Contains(luceneId))
-                    {
-                        System.Text.StringBuilder message = new System.Text.StringBuilder(prefix);
-                        message.Append("Duplicate key for hit index = ");
-                        message.Append(docnum);
-                        message.Append(", previous index = ");
-                        message.Append(((System.Int32)idMap[luceneId]).ToString());
-                        message.Append(", Lucene ID = ");
-                        message.Append(luceneId);
-                        Log(message.ToString());
-                    }
-                    else
-                    {
-                        idMap[luceneId] = (System.Int32)docnum;
-                    }
-                }
+					
+					luceneId = (System.Int32) hits[docnum].doc;
+					if (idMap.Contains(luceneId))
+					{
+						System.Text.StringBuilder message = new System.Text.StringBuilder(prefix);
+						message.Append("Duplicate key for hit index = ");
+						message.Append(docnum);
+						message.Append(", previous index = ");
+						message.Append(((System.Int32) idMap[luceneId]).ToString());
+						message.Append(", Lucene ID = ");
+						message.Append(luceneId);
+						Log(message.ToString());
+					}
+					else
+					{
+						idMap[luceneId] = (System.Int32) docnum;
+					}
+				}
 			}
 		}
 		
@@ -207,7 +215,7 @@
 			System.Console.Out.WriteLine(message);
 		}
 		
-		public class CustomSearcher : IndexSearcher
+		public class CustomSearcher:IndexSearcher
 		{
 			private void  InitBlock(TestCustomSearcherSort enclosingInstance)
 			{
@@ -247,7 +255,7 @@
 				this.switcher = switcher;
 			}
 			/* (non-Javadoc)
-			* @see Lucene.Net.search.Searchable#search(Lucene.Net.search.Query, Lucene.Net.search.Filter, int, Lucene.Net.search.Sort)
+			* @see Lucene.Net.Search.Searchable#search(Lucene.Net.Search.Query, Lucene.Net.Search.Filter, int, Lucene.Net.Search.Sort)
 			*/
 			public override TopFieldDocs Search(Query query, Filter filter, int nDocs, Sort sort)
 			{
@@ -257,7 +265,7 @@
 				return base.Search(bq, filter, nDocs, sort);
 			}
 			/* (non-Javadoc)
-			* @see Lucene.Net.search.Searchable#search(Lucene.Net.search.Query, Lucene.Net.search.Filter, int)
+			* @see Lucene.Net.Search.Searchable#search(Lucene.Net.Search.Query, Lucene.Net.Search.Filter, int)
 			*/
 			public override TopDocs Search(Query query, Filter filter, int nDocs)
 			{
@@ -267,13 +275,8 @@
 				return base.Search(bq, filter, nDocs);
 			}
 		}
-
 		private class RandomGen
 		{
-			public RandomGen(TestCustomSearcherSort enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
 			private void  InitBlock(TestCustomSearcherSort enclosingInstance)
 			{
 				this.enclosingInstance = enclosingInstance;
@@ -284,26 +287,24 @@
 			private TestCustomSearcherSort enclosingInstance;
 			public TestCustomSearcherSort Enclosing_Instance
 			{
-				// Just to generate some different Lucene Date strings
-				
 				get
 				{
 					return enclosingInstance;
 				}
 				
 			}
-
-			private System.Random random = new System.Random((System.Int32) 0); // to generate some arbitrary contents
-			// private System.Globalization.Calendar base_Renamed;
+			internal RandomGen(TestCustomSearcherSort enclosingInstance, System.Random random)
+			{
+				InitBlock(enclosingInstance);
+				this.random = random;
+			}
+			private System.Random random;
 			private System.DateTime base_Renamed;
 			
 			// Just to generate some different Lucene Date strings
-			public System.String GetLuceneDate()
+			public /*private*/ System.String GetLuceneDate()
 			{
-				long v1 = base_Renamed.Millisecond;
-				long v2 = random.Next();
-				long v3 = System.Int32.MinValue;
-				return DateTools.TimeToString(v1 + v2 - v3, DateTools.Resolution.DAY);
+                return DateTools.TimeToString(base_Renamed.Millisecond + random.Next() - System.Int32.MinValue, DateTools.Resolution.DAY);
 			}
 		}
 	}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDateFilter.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDateFilter.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,13 +19,13 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using DateTools = Lucene.Net.Documents.DateTools;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -35,25 +35,28 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Revision: 583534 $
+	/// <version>  $Revision: 791175 $
 	/// </version>
-	[TestFixture]
-	public class TestDateFilter : LuceneTestCase
+    [TestFixture]
+	public class TestDateFilter:LuceneTestCase
 	{
+		public TestDateFilter(System.String name):base(name)
+		{
+		}
 		
 		/// <summary> </summary>
 		[Test]
-		public virtual void  TestBefore()
+		public static void  TestBefore()
 		{
 			// create an index
 			RAMDirectory indexStore = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			long now = (long) (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalMilliseconds;
+			long now = System.DateTime.Now.Millisecond;
 			
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			// add time that is in the past
-			doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now - 1000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("datefield", DateTools.TimeToString(now - 1000, DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED));
 			doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			writer.Optimize();
@@ -63,10 +66,10 @@
 			
 			// filter that should preserve matches
 			//DateFilter df1 = DateFilter.Before("datefield", now);
-			RangeFilter df1 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now - 2000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), false, true);
+			TermRangeFilter df1 = new TermRangeFilter("datefield", DateTools.TimeToString(now - 2000, DateTools.Resolution.MILLISECOND), DateTools.TimeToString(now, DateTools.Resolution.MILLISECOND), false, true);
 			// filter that should discard matches
 			//DateFilter df2 = DateFilter.Before("datefield", now - 999999);
-			RangeFilter df2 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(0, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now - 2000 * 100000, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), true, false);
+			TermRangeFilter df2 = new TermRangeFilter("datefield", DateTools.TimeToString(0, DateTools.Resolution.MILLISECOND), DateTools.TimeToString(now - 2000, DateTools.Resolution.MILLISECOND), true, false);
 			
 			// search something that doesn't exist with DateFilter
 			Query query1 = new TermQuery(new Term("body", "NoMatchForThis"));
@@ -100,17 +103,17 @@
 		
 		/// <summary> </summary>
 		[Test]
-		public virtual void  TestAfter()
+		public static void  TestAfter()
 		{
 			// create an index
 			RAMDirectory indexStore = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			long now = (long) (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalMilliseconds;
+			long now = (long) (System.DateTime.UtcNow - new System.DateTime(1970, 1, 1)).TotalMilliseconds;
 			
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			// add time that is in the future
-			doc.Add(new Field("datefield", Lucene.Net.Documents.DateTools.TimeToString(now + 888888, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("datefield", DateTools.TimeToString(now + 888888, DateTools.Resolution.MILLISECOND), Field.Store.YES, Field.Index.NOT_ANALYZED));
 			doc.Add(new Field("body", "Today is a very sunny day in New York City", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			writer.Optimize();
@@ -120,10 +123,10 @@
 			
 			// filter that should preserve matches
 			//DateFilter df1 = DateFilter.After("datefield", now);
-			RangeFilter df1 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now + 999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), true, false);
+			TermRangeFilter df1 = new TermRangeFilter("datefield", DateTools.TimeToString(now, DateTools.Resolution.MILLISECOND), DateTools.TimeToString(now + 999999, DateTools.Resolution.MILLISECOND), true, false);
 			// filter that should discard matches
 			//DateFilter df2 = DateFilter.After("datefield", now + 999999);
-			RangeFilter df2 = new RangeFilter("datefield", Lucene.Net.Documents.DateTools.TimeToString(now + 999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), Lucene.Net.Documents.DateTools.TimeToString(now + 999999999, Lucene.Net.Documents.DateTools.Resolution.MILLISECOND), false, true);
+			TermRangeFilter df2 = new TermRangeFilter("datefield", DateTools.TimeToString(now + 999999, DateTools.Resolution.MILLISECOND), DateTools.TimeToString(now + 999999999, DateTools.Resolution.MILLISECOND), false, true);
 			
 			// search something that doesn't exist with DateFilter
 			Query query1 = new TermQuery(new Term("body", "NoMatchForThis"));

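The move from RangeFilter to TermRangeFilter above is the 2.9 rename; the pattern the test
exercises is building term bounds out of DateTools strings and handing the filter to the
searcher. A minimal sketch, assuming an index whose "datefield" is encoded at MILLISECOND
resolution as in the test (the helper class and method names are hypothetical):

    using DateTools = Lucene.Net.Documents.DateTools;
    using Lucene.Net.Search;

    public static class DateRangeFilters
    {
        // Keeps documents whose "datefield" term lies in [fromMillis, toMillis).
        // Both bounds must use the same DateTools.Resolution so the lexicographic
        // comparison done by TermRangeFilter matches the chronological order.
        public static TermRangeFilter Between(long fromMillis, long toMillis)
        {
            string lower = DateTools.TimeToString(fromMillis, DateTools.Resolution.MILLISECOND);
            string upper = DateTools.TimeToString(toMillis, DateTools.Resolution.MILLISECOND);
            return new TermRangeFilter("datefield", lower, upper, true, false);
        }
    }

The resulting filter is passed to IndexSearcher.Search together with the query, so documents
outside the range never reach the hit collector.
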
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDateSort.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDateSort.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,6 +19,7 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using DateTools = Lucene.Net.Documents.DateTools;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
@@ -26,7 +27,7 @@
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -34,8 +35,8 @@
 	/// <summary> Test date sorting, i.e. auto-sorting of fields with type "long".
 	/// See http://issues.apache.org/jira/browse/LUCENE-1045 
 	/// </summary>
-	[TestFixture]
-	public class TestDateSort
+    [TestFixture]
+	public class TestDateSort:LuceneTestCase
 	{
 		
 		private const System.String TEXT_FIELD = "text";
@@ -44,37 +45,25 @@
 		private static Directory directory;
 		
 		[SetUp]
-		public virtual void  SetUp()
+		public override void  SetUp()
 		{
+			base.SetUp();
 			// Create an index writer.
 			directory = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
 			// oldest doc:
 			// Add the first document.  text = "Document 1"  dateTime = Oct 10 03:25:22 EDT 2007
-			writer.AddDocument(CreateDocument("Document 1", 633275835220000000L));
+			writer.AddDocument(CreateDocument("Document 1", 1192001122000L));
 			// Add the second document.  text = "Document 2"  dateTime = Oct 10 03:25:26 EDT 2007 
-			writer.AddDocument(CreateDocument("Document 2", 633275835260000000L));
+			writer.AddDocument(CreateDocument("Document 2", 1192001126000L));
 			// Add the third document.  text = "Document 3"  dateTime = Oct 11 07:12:13 EDT 2007 
-			writer.AddDocument(CreateDocument("Document 3", 633276835330000000L));
+			writer.AddDocument(CreateDocument("Document 3", 1192101133000L));
 			// Add the fourth document.  text = "Document 4"  dateTime = Oct 11 08:02:09 EDT 2007
-			writer.AddDocument(CreateDocument("Document 4", 633276865290000000L));
+			writer.AddDocument(CreateDocument("Document 4", 1192104129000L));
 			// latest doc:
 			// Add the fifth document.  text = "Document 5"  dateTime = Oct 12 13:25:43 EDT 2007
-			writer.AddDocument(CreateDocument("Document 5", 633277923430000000L));
-
-			//// oldest doc:
-			//// Add the first document.  text = "Document 1"  dateTime = Oct 10 03:25:22 EDT 2007
-			//writer.AddDocument(CreateDocument("Document 1", 1192001122000L));
-			//// Add the second document.  text = "Document 2"  dateTime = Oct 10 03:25:26 EDT 2007 
-			//writer.AddDocument(CreateDocument("Document 2", 1192001126000L));
-			//// Add the third document.  text = "Document 3"  dateTime = Oct 11 07:12:13 EDT 2007 
-			//writer.AddDocument(CreateDocument("Document 3", 1192101133000L));
-			//// Add the fourth document.  text = "Document 4"  dateTime = Oct 11 08:02:09 EDT 2007
-			//writer.AddDocument(CreateDocument("Document 4", 1192104129000L));
-			//// latest doc:
-			//// Add the fifth document.  text = "Document 5"  dateTime = Oct 12 13:25:43 EDT 2007
-			//writer.AddDocument(CreateDocument("Document 5", 1192209943000L));
+			writer.AddDocument(CreateDocument("Document 5", 1192209943000L));
 			
 			writer.Optimize();
 			writer.Close();
@@ -88,8 +77,8 @@
 			// Create a Sort object.  reverse is set to true.
 			// problem occurs only with SortField.AUTO:
 			Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.AUTO, true));
-
-			Lucene.Net.QueryParsers.QueryParser queryParser = new Lucene.Net.QueryParsers.QueryParser(TEXT_FIELD, new WhitespaceAnalyzer());
+			
+			QueryParser queryParser = new QueryParser(TEXT_FIELD, new WhitespaceAnalyzer());
 			Query query = queryParser.Parse("Document");
 			
 			// Execute the search and process the search results.
@@ -111,10 +100,7 @@
 			expectedOrder[3] = "Document 2";
 			expectedOrder[4] = "Document 1";
 			
-			for (int i = 0; i < expectedOrder.Length; i++)
-			{
-				Assert.AreEqual(expectedOrder[i], actualOrder[i]);
-			}
+			Assert.AreEqual(new System.Collections.ArrayList(expectedOrder), new System.Collections.ArrayList(actualOrder));
 		}
 		
 		private static Document CreateDocument(System.String text, long time)

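The SetUp change above switches the stored values from .NET ticks back to milliseconds since
1970-01-01 UTC, matching the dates given in the comments. The reverse date sort the fixture
then runs boils down to the following sketch, assuming the Search overload that takes a Sort
and an index like the one built above; textField and dateTimeField stand in for the fixture's
TEXT_FIELD and DATE_TIME_FIELD constants, and the wrapper class name is illustrative:

    using Lucene.Net.Analysis;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;

    public static class DateSortExample
    {
        // Returns the text values of all documents matching "Document", newest first.
        // SortField.AUTO lets Lucene infer the field type; reverse=true inverts the order.
        public static string[] NewestFirst(Lucene.Net.Store.Directory directory,
                                           string textField, string dateTimeField)
        {
            IndexSearcher searcher = new IndexSearcher(directory);
            Sort sort = new Sort(new SortField(dateTimeField, SortField.AUTO, true));
            Query query = new QueryParser(textField, new WhitespaceAnalyzer()).Parse("Document");

            ScoreDoc[] hits = searcher.Search(query, null, 1000, sort).scoreDocs;
            string[] order = new string[hits.Length];
            for (int i = 0; i < hits.Length; i++)
            {
                order[i] = searcher.Doc(hits[i].doc).Get(textField);
            }
            searcher.Close();
            return order;
        }
    }
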
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDisjunctionMaxQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDisjunctionMaxQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,6 +19,7 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -26,7 +27,6 @@
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -35,8 +35,8 @@
 	/// <summary> Test of the DisjunctionMaxQuery.
 	/// 
 	/// </summary>
-	[TestFixture]
-	public class TestDisjunctionMaxQuery : LuceneTestCase
+    [TestFixture]
+	public class TestDisjunctionMaxQuery:LuceneTestCase
 	{
 		public TestDisjunctionMaxQuery()
 		{
@@ -58,10 +58,8 @@
 		/// http://issues.apache.org/jira/browse/LUCENE-323
 		/// </p>
 		/// </summary>
-		/// <author>  Williams
-		/// </author>
 		[Serializable]
-		private class TestSimilarity : DefaultSimilarity
+		private class TestSimilarity:DefaultSimilarity
 		{
 			
 			public TestSimilarity()
@@ -90,20 +88,19 @@
 		public IndexSearcher s;
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
-			
 			base.SetUp();
 			
 			index = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(index, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(index, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetSimilarity(sim);
 			
 			// hed is the most important field, dek is secondary
 			
 			// d1 is an "ok" match for:  albino elephant
 			{
-				Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+				Document d1 = new Document();
 				d1.Add(new Field("id", "d1", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id", "d1"));
 				d1.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "elephant"));
 				d1.Add(new Field("dek", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("dek", "elephant"));
@@ -112,7 +109,7 @@
 			
 			// d2 is a "good" match for:  albino elephant
 			{
-				Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+				Document d2 = new Document();
 				d2.Add(new Field("id", "d2", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id", "d2"));
 				d2.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "elephant"));
 				d2.Add(new Field("dek", "albino", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("dek", "albino"));
@@ -122,7 +119,7 @@
 			
 			// d3 is a "better" match for:  albino elephant
 			{
-				Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
+				Document d3 = new Document();
 				d3.Add(new Field("id", "d3", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id", "d3"));
 				d3.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "albino"));
 				d3.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "elephant"));
@@ -131,7 +128,7 @@
 			
 			// d4 is the "best" match for:  albino elephant
 			{
-				Lucene.Net.Documents.Document d4 = new Lucene.Net.Documents.Document();
+				Document d4 = new Document();
 				d4.Add(new Field("id", "d4", Field.Store.YES, Field.Index.NOT_ANALYZED)); //Field.Keyword("id", "d4"));
 				d4.Add(new Field("hed", "albino", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "albino"));
 				d4.Add(new Field("hed", "elephant", Field.Store.YES, Field.Index.ANALYZED)); //Field.Text("hed", "elephant"));
@@ -156,11 +153,11 @@
 			QueryUtils.Check(dq, s);
 			
 			Weight dw = dq.Weight(s);
-			Scorer ds = dw.Scorer(r);
-			bool skipOk = ds.SkipTo(3);
+			Scorer ds = dw.Scorer(r, true, false);
+			bool skipOk = ds.Advance(3) != DocIdSetIterator.NO_MORE_DOCS;
 			if (skipOk)
 			{
-				Assert.Fail("firsttime skipTo found a match? ... " + r.Document(ds.Doc()).Get("id"));
+				Assert.Fail("firsttime skipTo found a match? ... " + r.Document(ds.DocID()).Get("id"));
 			}
 		}
 		
@@ -174,13 +171,11 @@
 			QueryUtils.Check(dq, s);
 			
 			Weight dw = dq.Weight(s);
-			Scorer ds = dw.Scorer(r);
-			Assert.IsTrue(ds.SkipTo(3), "firsttime skipTo found no match");
-			Assert.AreEqual("d4", r.Document(ds.Doc()).Get("id"), "found wrong docid");
+			Scorer ds = dw.Scorer(r, true, false);
+			Assert.IsTrue(ds.Advance(3) != DocIdSetIterator.NO_MORE_DOCS, "firsttime skipTo found no match");
+			Assert.AreEqual("d4", r.Document(ds.DocID()).Get("id"), "found wrong docid");
 		}
 		
-		
-		
 		[Test]
 		public virtual void  TestSimpleEqualScores1()
 		{
@@ -217,7 +212,7 @@
 			q.Add(Tq("dek", "albino"));
 			q.Add(Tq("dek", "elephant"));
 			QueryUtils.Check(q, s);
-
+			
 			
 			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
@@ -247,7 +242,7 @@
 			q.Add(Tq("dek", "albino"));
 			q.Add(Tq("dek", "elephant"));
 			QueryUtils.Check(q, s);
-
+			
 			
 			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
 			
@@ -257,7 +252,7 @@
 				float score = h[0].score;
 				for (int i = 1; i < h.Length; i++)
 				{
-                    Assert.AreEqual(score, h[i].score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
+					Assert.AreEqual(score, h[i].score, SCORE_COMP_THRESH, "score #" + i + " is not the same");
 				}
 			}
 			catch (System.ApplicationException e)
@@ -282,7 +277,7 @@
 			try
 			{
 				Assert.AreEqual(3, h.Length, "3 docs should match " + q.ToString());
-				Assert.AreEqual("d2", s.Doc(h[0].doc).Get("id"), "wrong first");
+				Assert.AreEqual("d2", s.Doc(h[0].doc).Get("id"), "wrong first");
 				float score0 = h[0].score;
 				float score1 = h[1].score;
 				float score2 = h[2].score;
@@ -305,14 +300,14 @@
 				DisjunctionMaxQuery q1 = new DisjunctionMaxQuery(0.0f);
 				q1.Add(Tq("hed", "albino"));
 				q1.Add(Tq("dek", "albino"));
-				q.Add(q1, BooleanClause.Occur.MUST); //false,false);
+				q.Add(q1, BooleanClause.Occur.MUST); //true,false);
 				QueryUtils.Check(q1, s);
 			}
 			{
 				DisjunctionMaxQuery q2 = new DisjunctionMaxQuery(0.0f);
 				q2.Add(Tq("hed", "elephant"));
 				q2.Add(Tq("dek", "elephant"));
-				q.Add(q2, BooleanClause.Occur.MUST); //false,false);
+				q.Add(q2, BooleanClause.Occur.MUST); //true,false);
 				QueryUtils.Check(q2, s);
 			}
 			
@@ -336,6 +331,7 @@
 			}
 		}
 		
+		
 		[Test]
 		public virtual void  TestBooleanOptionalNoTiebreaker()
 		{
@@ -378,6 +374,7 @@
 			}
 		}
 		
+		
 		[Test]
 		public virtual void  TestBooleanOptionalWithTiebreaker()
 		{
@@ -426,11 +423,12 @@
 			}
 			catch (System.ApplicationException e)
 			{
-                PrintHits("testBooleanOptionalWithTiebreaker", h, s);
+				PrintHits("testBooleanOptionalWithTiebreaker", h, s);
 				throw e;
 			}
 		}
 		
+		
 		[Test]
 		public virtual void  TestBooleanOptionalWithTiebreakerAndBoost()
 		{
@@ -479,11 +477,17 @@
 			}
 			catch (System.ApplicationException e)
 			{
-                PrintHits("testBooleanOptionalWithTiebreakerAndBoost", h, s);
+				PrintHits("testBooleanOptionalWithTiebreakerAndBoost", h, s);
 				throw e;
 			}
 		}
 		
+		
+		
+		
+		
+		
+		
 		/// <summary>macro </summary>
 		protected internal virtual Query Tq(System.String f, System.String t)
 		{
@@ -497,14 +501,15 @@
 			return q;
 		}
 		
-		protected internal virtual void  PrintHits(System.String test, ScoreDoc[] h, Searcher s)
+		
+		protected internal virtual void  PrintHits(System.String test, ScoreDoc[] h, Searcher searcher)
 		{
 			
 			System.Console.Error.WriteLine("------- " + test + " -------");
 			
 			for (int i = 0; i < h.Length; i++)
 			{
-				Lucene.Net.Documents.Document d = s.Doc(h[i].doc);
+				Document d = searcher.Doc(h[i].doc);
 				float score = h[i].score;
 				System.Console.Error.WriteLine("#" + i + ": {0.000000000}" + score + " - " + d.Get("id"));
 			}

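The Weight/Scorer changes in this file follow the 2.9 DocIdSetIterator contract: Doc(), Next()
and SkipTo() give way to DocID(), NextDoc() and Advance(), all of which report exhaustion
through the DocIdSetIterator.NO_MORE_DOCS sentinel rather than a boolean. A minimal sketch of
walking every match of a query under the new contract (method and variable names are
illustrative; the query, searcher and reader are assumed to be set up as in the tests above):

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    public static class ScorerWalkExample
    {
        public static void WalkMatches(Query q, Searcher s, IndexReader r)
        {
            Weight w = q.Weight(s);
            // Arguments are scoreDocsInOrder and topScorer, as in the calls above.
            Scorer scorer = w.Scorer(r, true, false);
            if (scorer == null)
            {
                return; // no matching documents in this reader
            }

            int doc;
            while ((doc = scorer.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
            {
                System.Console.WriteLine(doc + " scored " + scorer.Score());
            }
        }
    }

Advance(target) behaves the same way: it either lands on the first match at or after the target
or returns NO_MORE_DOCS, which is exactly what the rewritten skip-to assertions check.
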
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDocBoost.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDocBoost.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,11 +19,12 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Lucene.Net.Documents;
+using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -33,14 +34,14 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Revision: 583534 $
+	/// <version>  $Revision: 787772 $
 	/// </version>
-	[TestFixture]
-	public class TestDocBoost : LuceneTestCase
+    [TestFixture]
+	public class TestDocBoost:LuceneTestCase
 	{
-		private class AnonymousClassHitCollector : HitCollector
+		private class AnonymousClassCollector:Collector
 		{
-			public AnonymousClassHitCollector(float[] scores, TestDocBoost enclosingInstance)
+			public AnonymousClassCollector(float[] scores, TestDocBoost enclosingInstance)
 			{
 				InitBlock(scores, enclosingInstance);
 			}
@@ -59,14 +60,31 @@
 				}
 				
 			}
-			public override void  Collect(int doc, float score)
+			private int base_Renamed = 0;
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
+			{
+				scores[doc + base_Renamed] = scorer.Score();
+			}
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				base_Renamed = docBase;
+			}
+			public override bool AcceptsDocsOutOfOrder()
 			{
-				scores[doc] = score;
+				return true;
 			}
 		}
+		public TestDocBoost(System.String name):base(name)
+		{
+		}
 		
 		[Test]
-		public virtual void  TestDocBoost_Renamed_Method()
+		public virtual void  TestDocBoost_Renamed()
 		{
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
@@ -75,10 +93,10 @@
 			Fieldable f2 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
 			f2.SetBoost(2.0f);
 			
-			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
-			Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
-			Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
-			Lucene.Net.Documents.Document d4 = new Lucene.Net.Documents.Document();
+			Document d1 = new Document();
+			Document d2 = new Document();
+			Document d3 = new Document();
+			Document d4 = new Document();
 			d3.SetBoost(3.0f);
 			d4.SetBoost(2.0f);
 			
@@ -96,7 +114,7 @@
 			
 			float[] scores = new float[4];
 			
-			new IndexSearcher(store).Search(new TermQuery(new Term("field", "word")), new AnonymousClassHitCollector(scores, this));
+			new IndexSearcher(store).Search(new TermQuery(new Term("field", "word")), new AnonymousClassCollector(scores, this));
 			
 			float lastScore = 0.0f;
 			

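The switch from HitCollector to Collector above is the other large 2.9 search change: instead of
a single Collect(doc, score) callback, Lucene now hands the collector a per-segment Scorer and
segment-relative doc ids. A standalone sketch of the same idea (the class name is illustrative;
only the four overrides shown in the diff are used):

    using System.Collections.Generic;
    using IndexReader = Lucene.Net.Index.IndexReader;
    using Lucene.Net.Search;

    public class ScoreRecordingCollector : Collector
    {
        private Scorer scorer;
        private int docBase;

        // Scores keyed by index-wide doc id.
        public readonly IDictionary<int, float> Scores = new Dictionary<int, float>();

        public override void SetScorer(Scorer scorer)
        {
            this.scorer = scorer;
        }

        public override void Collect(int doc)
        {
            // doc is relative to the current segment; add docBase to get the global id.
            Scores[docBase + doc] = scorer.Score();
        }

        public override void SetNextReader(IndexReader reader, int docBase)
        {
            this.docBase = docBase;
        }

        public override bool AcceptsDocsOutOfOrder()
        {
            return true; // order is irrelevant when only recording scores
        }
    }

Passing an instance to IndexSearcher.Search(query, collector) fills Scores the same way the
anonymous collector above fills its float array.
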
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestDocIdSet.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestDocIdSet.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestDocIdSet.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestDocIdSet.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,245 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Index = Lucene.Net.Documents.Field.Index;
+using Store = Lucene.Net.Documents.Field.Store;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	public class TestDocIdSet:LuceneTestCase
+	{
+		private class AnonymousClassDocIdSet_Renamed_Class:DocIdSet
+		{
+			public AnonymousClassDocIdSet_Renamed_Class(int maxdoc, TestDocIdSet enclosingInstance)
+			{
+				InitBlock(maxdoc, enclosingInstance);
+			}
+			private class AnonymousClassDocIdSetIterator:DocIdSetIterator
+			{
+				public AnonymousClassDocIdSetIterator(int maxdoc, AnonymousClassDocIdSet_Renamed_Class enclosingInstance)
+				{
+					InitBlock(maxdoc, enclosingInstance);
+				}
+				private void  InitBlock(int maxdoc, AnonymousClassDocIdSet_Renamed_Class enclosingInstance)
+				{
+					this.maxdoc = maxdoc;
+					this.enclosingInstance = enclosingInstance;
+				}
+				private int maxdoc;
+				private AnonymousClassDocIdSet_Renamed_Class enclosingInstance;
+				public AnonymousClassDocIdSet_Renamed_Class Enclosing_Instance
+				{
+					get
+					{
+						return enclosingInstance;
+					}
+					
+				}
+				
+				internal int docid = - 1;
+				
+				/// <deprecated> use {@link #DocID()} instead. 
+				/// </deprecated>
+				public override int Doc()
+				{
+					return docid;
+				}
+				
+				public override int DocID()
+				{
+					return docid;
+				}
+				
+				/// <deprecated> use {@link #NextDoc()} instead. 
+				/// </deprecated>
+				public override bool Next()
+				{
+					return NextDoc() != NO_MORE_DOCS;
+				}
+				
+				//@Override
+				public override int NextDoc()
+				{
+					docid++;
+					return docid < maxdoc ? docid : (docid = NO_MORE_DOCS);
+				}
+				
+				/// <deprecated> use {@link #Advance(int)} instead. 
+				/// </deprecated>
+				public override bool SkipTo(int target)
+				{
+					return Advance(target) != NO_MORE_DOCS;
+				}
+				
+				//@Override
+				public override int Advance(int target)
+				{
+					while (NextDoc() < target)
+					{
+					}
+					return docid;
+				}
+			}
+			private void  InitBlock(int maxdoc, TestDocIdSet enclosingInstance)
+			{
+				this.maxdoc = maxdoc;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private int maxdoc;
+			private TestDocIdSet enclosingInstance;
+			public TestDocIdSet Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			// @Override
+			public override DocIdSetIterator Iterator()
+			{
+				return new AnonymousClassDocIdSetIterator(maxdoc, this);
+			}
+		}
+		private class AnonymousClassFilteredDocIdSet:FilteredDocIdSet
+		{
+			private void  InitBlock(TestDocIdSet enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDocIdSet enclosingInstance;
+			public TestDocIdSet Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal AnonymousClassFilteredDocIdSet(TestDocIdSet enclosingInstance, Lucene.Net.Search.DocIdSet Param1):base(Param1)
+			{
+				InitBlock(enclosingInstance);
+			}
+			// @Override
+			public /*protected internal*/ override bool Match(int docid)
+			{
+				return docid % 2 == 0; //validate only even docids
+			}
+		}
+		[Serializable]
+		private class AnonymousClassFilter:Filter
+		{
+			public AnonymousClassFilter(TestDocIdSet enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestDocIdSet enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDocIdSet enclosingInstance;
+			public TestDocIdSet Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public override DocIdSet GetDocIdSet(IndexReader reader)
+			{
+				return null;
+			}
+		}
+        [Test]
+		public virtual void  TestFilteredDocIdSet()
+		{
+			int maxdoc = 10;
+			DocIdSet innerSet = new AnonymousClassDocIdSet_Renamed_Class(maxdoc, this);
+			
+			
+			DocIdSet filteredSet = new AnonymousClassFilteredDocIdSet(this, innerSet);
+			
+			DocIdSetIterator iter = filteredSet.Iterator();
+			System.Collections.ArrayList list = new System.Collections.ArrayList();
+			int doc = iter.Advance(3);
+			if (doc != DocIdSetIterator.NO_MORE_DOCS)
+			{
+				list.Add((System.Int32) doc);
+				while ((doc = iter.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+				{
+					list.Add((System.Int32) doc);
+				}
+			}
+			
+			int[] docs = new int[list.Count];
+			int c = 0;
+			System.Collections.IEnumerator intIter = list.GetEnumerator();
+			while (intIter.MoveNext())
+			{
+				docs[c++] = ((System.Int32) intIter.Current);
+			}
+			int[] answer = new int[]{4, 6, 8};
+			bool same = SupportClass.CollectionsHelper.Equals(answer, docs);
+			if (!same)
+			{
+				System.Console.Out.WriteLine("answer: " + _TestUtil.ArrayToString(answer));
+				System.Console.Out.WriteLine("gotten: " + _TestUtil.ArrayToString(docs));
+				Assert.Fail();
+			}
+		}
+		
+        [Test]
+		public virtual void  TestNullDocIdSet()
+		{
+			// Tests that if a Filter produces a null DocIdSet, which is given to
+			// IndexSearcher, everything works fine. This came up in LUCENE-1754.
+			Directory dir = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), MaxFieldLength.UNLIMITED);
+			Document doc = new Document();
+			doc.Add(new Field("c", "val", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
+			writer.AddDocument(doc);
+			writer.Close();
+			
+			// First verify the document is searchable.
+			IndexSearcher searcher = new IndexSearcher(dir, true);
+			Assert.AreEqual(1, searcher.Search(new MatchAllDocsQuery(), 10).totalHits);
+			
+			// Now search w/ a Filter which returns a null DocIdSet
+			Filter f = new AnonymousClassFilter(this);
+			
+			Assert.AreEqual(0, searcher.Search(new MatchAllDocsQuery(), f, 10).totalHits);
+			searcher.Close();
+		}
+	}
+}
\ No newline at end of file
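
The anonymous FilteredDocIdSet above is a mechanical conversion of a Java anonymous class;
written directly in C#, the same even-docid filter is just a small subclass (class name
illustrative):

    using Lucene.Net.Search;

    // Wraps any DocIdSet and keeps only even doc ids. FilteredDocIdSet calls Match(docid)
    // for every id produced by the inner set's iterator and drops the ids that fail it.
    public class EvenDocIdSet : FilteredDocIdSet
    {
        public EvenDocIdSet(DocIdSet inner) : base(inner)
        {
        }

        public override bool Match(int docid)
        {
            return docid % 2 == 0;
        }
    }

The second test documents the companion convention: a Filter whose GetDocIdSet returns null is
treated as matching nothing, so the MatchAllDocsQuery that finds one document unfiltered finds
zero documents once the null-returning filter is applied.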