You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC

svn commit: r832486 [17/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...

Added: incubator/lucene.net/trunk/C#/src/Test/Search/Payloads/TestPayloadNearQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Payloads/TestPayloadNearQuery.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Payloads/TestPayloadNearQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Payloads/TestPayloadNearQuery.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,293 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
+using Token = Lucene.Net.Analysis.Token;
+using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Payload = Lucene.Net.Index.Payload;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using QueryUtils = Lucene.Net.Search.QueryUtils;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+using Searcher = Lucene.Net.Search.Searcher;
+using TopDocs = Lucene.Net.Search.TopDocs;
+using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search.Payloads
+{
+	
+	
+	// Exercises PayloadNearQuery scoring.  PayloadAnalyzer attaches a payload that
+	// alternates between {2} and {4} on consecutive tokens, and BoostingSimilarity
+	// forces every other scoring factor to 1, so a matching span of adjacent terms
+	// is expected to score the average payload value: 3.
+    [TestFixture]
+	public class TestPayloadNearQuery:LuceneTestCase
+	{
+		private void  InitBlock()
+		{
+			similarity = new BoostingSimilarity();
+		}
+		// Searcher over the index built in SetUp(); scores with BoostingSimilarity.
+		private IndexSearcher searcher;
+		private BoostingSimilarity similarity;
+		// The two alternating payload values (their average is 3).
+		private byte[] payload2 = new byte[]{2};
+		private byte[] payload4 = new byte[]{4};
+		
+		public TestPayloadNearQuery(System.String s):base(s)
+		{
+			InitBlock();
+		}
+		
+		// Lower-cases the input and pipes it through PayloadFilter to attach payloads.
+		private class PayloadAnalyzer:Analyzer
+		{
+			public PayloadAnalyzer(TestPayloadNearQuery enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestPayloadNearQuery enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestPayloadNearQuery enclosingInstance;
+			public TestPayloadNearQuery Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				TokenStream result = new LowerCaseTokenizer(reader);
+				result = new PayloadFilter(enclosingInstance, result, fieldName);
+				return result;
+			}
+		}
+		
+		// Attaches payload2 to even-numbered tokens and payload4 to odd-numbered ones.
+		private class PayloadFilter:TokenFilter
+		{
+			private void  InitBlock(TestPayloadNearQuery enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestPayloadNearQuery enclosingInstance;
+			public TestPayloadNearQuery Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal System.String fieldName;
+			internal int numSeen = 0;
+			protected internal PayloadAttribute payAtt;
+			
+			public PayloadFilter(TestPayloadNearQuery enclosingInstance, TokenStream input, System.String fieldName):base(input)
+			{
+				InitBlock(enclosingInstance);
+				this.fieldName = fieldName;
+				payAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
+			}
+			
+			public override bool IncrementToken()
+			{
+				bool result = false;
+				if (input.IncrementToken() == true)
+				{
+					// Alternate between the two payload values so that any two
+					// adjacent terms carry payloads averaging to 3.
+					if (numSeen % 2 == 0)
+					{
+						payAtt.SetPayload(new Payload(Enclosing_Instance.payload2));
+					}
+					else
+					{
+						payAtt.SetPayload(new Payload(Enclosing_Instance.payload4));
+					}
+					numSeen++;
+					result = true;
+				}
+				return result;
+			}
+		}
+		
+		// Builds a PayloadNearQuery with slop 0 over the whitespace-separated words of
+		// 'phrase', one PayloadTermQuery (with AveragePayloadFunction) per word.
+		private PayloadNearQuery NewPhraseQuery(System.String fieldName, System.String phrase, bool inOrder)
+		{
+			// NOTE(review): 'n' is unused — leftover from the Java port.
+			int n;
+			System.String[] words = System.Text.RegularExpressions.Regex.Split(phrase, "[\\s]+");
+			SpanQuery[] clauses = new SpanQuery[words.Length];
+			for (int i = 0; i < clauses.Length; i++)
+			{
+				clauses[i] = new PayloadTermQuery(new Term(fieldName, words[i]), new AveragePayloadFunction());
+			}
+			return new PayloadNearQuery(clauses, 0, inOrder);
+		}
+		
+		// Indexes 1000 docs (the English spelling of 0..999 in "field") into a fresh
+		// RAMDirectory, then opens a read-only searcher; both writer and searcher use
+		// BoostingSimilarity so only the payloads affect scores.
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			RAMDirectory directory = new RAMDirectory();
+			PayloadAnalyzer analyzer = new PayloadAnalyzer(this);
+			IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetSimilarity(similarity);
+			//writer.infoStream = System.out;
+			for (int i = 0; i < 1000; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
+				writer.AddDocument(doc);
+			}
+			writer.Optimize();
+			writer.Close();
+			
+			searcher = new IndexSearcher(directory, true);
+			searcher.SetSimilarity(similarity);
+		}
+		
+		// Two-term ordered spans: every match should score exactly 3 (average of 2 and 4).
+        [Test]
+		public virtual void  Test()
+		{
+			PayloadNearQuery query;
+			TopDocs hits;
+			
+			query = NewPhraseQuery("field", "twenty two", true);
+			QueryUtils.Check(query);
+			
+			// all 10 hits should have score = 3 because adjacent terms have payloads of 2,4
+			// and all the similarity factors are set to 1
+			hits = searcher.Search(query, null, 100);
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			Assert.IsTrue(hits.totalHits == 10, "should be 10 hits");
+			for (int j = 0; j < hits.scoreDocs.Length; j++)
+			{
+				ScoreDoc doc = hits.scoreDocs[j];
+				Assert.IsTrue(doc.score == 3, doc.score + " does not equal: " + 3);
+			}
+			for (int i = 1; i < 10; i++)
+			{
+				query = NewPhraseQuery("field", English.IntToEnglish(i) + " hundred", true);
+				// all should have score = 3 because adjacent terms have payloads of 2,4
+				// and all the similarity factors are set to 1
+				hits = searcher.Search(query, null, 100);
+				Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+				Assert.IsTrue(hits.totalHits == 100, "should be 100 hits");
+				for (int j = 0; j < hits.scoreDocs.Length; j++)
+				{
+					ScoreDoc doc = hits.scoreDocs[j];
+					//				System.out.println("Doc: " + doc.toString());
+					//				System.out.println("Explain: " + searcher.explain(query, doc.doc));
+					Assert.IsTrue(doc.score == 3, doc.score + " does not equal: " + 3);
+				}
+			}
+		}
+		
+		// A four-term ordered span should still average to 3 across all its payloads.
+        [Test]
+		public virtual void  TestLongerSpan()
+		{
+			PayloadNearQuery query;
+			TopDocs hits;
+			query = NewPhraseQuery("field", "nine hundred ninety nine", true);
+			hits = searcher.Search(query, null, 100);
+			// NOTE(review): scoreDocs[0] is read before the hits assertions below run.
+			ScoreDoc doc = hits.scoreDocs[0];
+			//		System.out.println("Doc: " + doc.toString());
+			//		System.out.println("Explain: " + searcher.explain(query, doc.doc));
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			Assert.IsTrue(hits.totalHits == 1, "there should only be one hit");
+			// should have score = 3 because adjacent terms have payloads of 2,4
+			Assert.IsTrue(doc.score == 3, doc.score + " does not equal: " + 3);
+		}
+		
+		// Nested ordered/unordered PayloadNearQuery clauses must count every underlying
+		// payload, so the combined score is still the overall average: 3.
+        [Test]
+		public virtual void  TestComplexNested()
+		{
+			PayloadNearQuery query;
+			TopDocs hits;
+			
+			// combine ordered and unordered spans with some nesting to make sure all payloads are counted
+			
+			SpanQuery q1 = NewPhraseQuery("field", "nine hundred", true);
+			SpanQuery q2 = NewPhraseQuery("field", "ninety nine", true);
+			SpanQuery q3 = NewPhraseQuery("field", "nine ninety", false);
+			SpanQuery q4 = NewPhraseQuery("field", "hundred nine", false);
+			SpanQuery[] clauses = new SpanQuery[]{new PayloadNearQuery(new SpanQuery[]{q1, q2}, 0, true), new PayloadNearQuery(new SpanQuery[]{q3, q4}, 0, false)};
+			query = new PayloadNearQuery(clauses, 0, false);
+			hits = searcher.Search(query, null, 100);
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			// should be only 1 hit - doc 999
+			Assert.IsTrue(hits.scoreDocs.Length == 1, "should only be one hit");
+			// the score should be 3 - the average of all the underlying payloads
+			ScoreDoc doc = hits.scoreDocs[0];
+			//		System.out.println("Doc: " + doc.toString());
+			//		System.out.println("Explain: " + searcher.explain(query, doc.doc));
+			Assert.IsTrue(doc.score == 3, doc.score + " does not equal: " + 3);
+		}
+		// must be static for weight serialization tests 
+		[Serializable]
+		internal class BoostingSimilarity:DefaultSimilarity
+		{
+			
+			// TODO: Remove warning after API has been finalized
+			public override float ScorePayload(int docId, System.String fieldName, int start, int end, byte[] payload, int offset, int length)
+			{
+				// The test payloads are a single byte, so ignore offset/length and
+				// surface that byte directly as the payload score.
+				return payload[0];
+			}
+			//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+			//Make everything else 1 so we see the effect of the payload
+			//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+			public override float LengthNorm(System.String fieldName, int numTerms)
+			{
+				return 1;
+			}
+			
+			public override float QueryNorm(float sumOfSquaredWeights)
+			{
+				return 1;
+			}
+			
+			public override float SloppyFreq(int distance)
+			{
+				return 1;
+			}
+			
+			public override float Coord(int overlap, int maxOverlap)
+			{
+				return 1;
+			}
+			public override float Tf(float freq)
+			{
+				return 1;
+			}
+			// idf used for phrase queries
+			public override float Idf(System.Collections.ICollection terms, Searcher searcher)
+			{
+				return 1;
+			}
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Search/Payloads/TestPayloadTermQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Payloads/TestPayloadTermQuery.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Payloads/TestPayloadTermQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Payloads/TestPayloadTermQuery.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,402 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
+using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Payload = Lucene.Net.Index.Payload;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using BooleanClause = Lucene.Net.Search.BooleanClause;
+using BooleanQuery = Lucene.Net.Search.BooleanQuery;
+using CheckHits = Lucene.Net.Search.CheckHits;
+using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using QueryUtils = Lucene.Net.Search.QueryUtils;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+using TopDocs = Lucene.Net.Search.TopDocs;
+using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+using TermSpans = Lucene.Net.Search.Spans.TermSpans;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search.Payloads
+{
+	
+	
+	/// <summary>
+	/// Tests PayloadTermQuery scoring.  PayloadAnalyzer indexes a payload of {1} on
+	/// every "field" token and alternating payloads of {2}/{4} on "multiField"
+	/// tokens; BoostingSimilarity forces all other scoring factors to 1 so the
+	/// payload value surfaces directly in the document score.
+	/// </summary>
+    [TestFixture]
+	public class TestPayloadTermQuery:LuceneTestCase
+	{
+		private void  InitBlock()
+		{
+			similarity = new BoostingSimilarity();
+		}
+		// Searcher over the index built in SetUp(); scores with BoostingSimilarity.
+		private IndexSearcher searcher;
+		private BoostingSimilarity similarity;
+		// Payload attached to every "field" token.
+		private byte[] payloadField = new byte[]{1};
+		// Payloads attached alternately to "multiField" tokens (max 4, else 2).
+		private byte[] payloadMultiField1 = new byte[]{2};
+		private byte[] payloadMultiField2 = new byte[]{4};
+		protected internal RAMDirectory directory;
+		
+		public TestPayloadTermQuery(System.String s):base(s)
+		{
+			InitBlock();
+		}
+		
+		// Lower-cases the input and pipes it through PayloadFilter to attach payloads.
+		private class PayloadAnalyzer:Analyzer
+		{
+			public PayloadAnalyzer(TestPayloadTermQuery enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestPayloadTermQuery enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestPayloadTermQuery enclosingInstance;
+			public TestPayloadTermQuery Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				TokenStream result = new LowerCaseTokenizer(reader);
+				result = new PayloadFilter(enclosingInstance, result, fieldName);
+				return result;
+			}
+		}
+		
+		// Attaches {1} to "field" tokens, {2}/{4} alternately to "multiField" tokens,
+		// and no payload at all to any other field (e.g. PayloadHelper.NO_PAYLOAD_FIELD).
+		private class PayloadFilter:TokenFilter
+		{
+			private void  InitBlock(TestPayloadTermQuery enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestPayloadTermQuery enclosingInstance;
+			public TestPayloadTermQuery Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal System.String fieldName;
+			internal int numSeen = 0;
+			
+			internal PayloadAttribute payloadAtt;
+			
+			public PayloadFilter(TestPayloadTermQuery enclosingInstance, TokenStream input, System.String fieldName):base(input)
+			{
+				InitBlock(enclosingInstance);
+				this.fieldName = fieldName;
+				payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
+			}
+			
+			public override bool IncrementToken()
+			{
+				bool hasNext = input.IncrementToken();
+				if (hasNext)
+				{
+					if (fieldName.Equals("field"))
+					{
+						payloadAtt.SetPayload(new Payload(Enclosing_Instance.payloadField));
+					}
+					else if (fieldName.Equals("multiField"))
+					{
+						// Alternate so each repeated term can carry different payloads.
+						if (numSeen % 2 == 0)
+						{
+							payloadAtt.SetPayload(new Payload(Enclosing_Instance.payloadMultiField1));
+						}
+						else
+						{
+							payloadAtt.SetPayload(new Payload(Enclosing_Instance.payloadMultiField2));
+						}
+						numSeen++;
+					}
+					return true;
+				}
+				else
+				{
+					return false;
+				}
+			}
+		}
+		
+		// Indexes 1000 docs (the English spelling of 0..999) into 'directory' with a
+		// no-payload field, a single-payload "field" and a doubled "multiField", then
+		// opens a read-only searcher; writer and searcher use BoostingSimilarity.
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			directory = new RAMDirectory();
+			PayloadAnalyzer analyzer = new PayloadAnalyzer(this);
+			IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetSimilarity(similarity);
+			//writer.infoStream = System.out;
+			for (int i = 0; i < 1000; i++)
+			{
+				Document doc = new Document();
+				Field noPayloadField = new Field(PayloadHelper.NO_PAYLOAD_FIELD, English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED);
+				//noPayloadField.setBoost(0);
+				doc.Add(noPayloadField);
+				doc.Add(new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
+				doc.Add(new Field("multiField", English.IntToEnglish(i) + "  " + English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
+				writer.AddDocument(doc);
+			}
+			writer.Optimize();
+			writer.Close();
+			
+			searcher = new IndexSearcher(directory, true);
+			searcher.SetSimilarity(similarity);
+		}
+		
+		// Single-occurrence term on "field": every hit scores exactly its payload (1).
+        [Test]
+		public virtual void  Test()
+		{
+			PayloadTermQuery query = new PayloadTermQuery(new Term("field", "seventy"), new MaxPayloadFunction());
+			TopDocs hits = searcher.Search(query, null, 100);
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			Assert.IsTrue(hits.totalHits == 100, "hits Size: " + hits.totalHits + " is not: " + 100);
+			
+			//they should all have the exact same score, because they all contain seventy once, and we set
+			//all the other similarity factors to be 1
+			
+			Assert.IsTrue(hits.GetMaxScore() == 1, hits.GetMaxScore() + " does not equal: " + 1);
+			for (int i = 0; i < hits.scoreDocs.Length; i++)
+			{
+				ScoreDoc doc = hits.scoreDocs[i];
+				Assert.IsTrue(doc.score == 1, doc.score + " does not equal: " + 1);
+			}
+			CheckHits.CheckExplanations(query, PayloadHelper.FIELD, searcher, true);
+			Lucene.Net.Search.Spans.Spans spans = query.GetSpans(searcher.GetIndexReader());
+			Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+			Assert.IsTrue(spans is TermSpans, "spans is not an instanceof " + typeof(TermSpans));
+        }
+		
+		// Equals/hashCode contract: a PayloadTermQuery is never equal to a plain
+		// SpanTermQuery, and queries with different payload functions are unequal.
+        [Test]
+		public virtual void  TestQuery()
+		{
+			PayloadTermQuery boostingFuncTermQuery = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction());
+			QueryUtils.Check(boostingFuncTermQuery);
+			
+			SpanTermQuery spanTermQuery = new SpanTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"));
+			
+			// Equality must be symmetric between the two query types.
+			Assert.IsTrue(boostingFuncTermQuery.Equals(spanTermQuery) == spanTermQuery.Equals(boostingFuncTermQuery));
+			
+			PayloadTermQuery boostingFuncTermQuery2 = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new AveragePayloadFunction());
+			
+			QueryUtils.CheckUnequal(boostingFuncTermQuery, boostingFuncTermQuery2);
+		}
+		
+		// "seventy" occurs twice per doc in multiField; MaxPayloadFunction should pick
+		// the larger payload, giving 4 for the ten docs where a {4} payload lands on
+		// the term and 2 for the rest.
+        [Test]
+		public virtual void  TestMultipleMatchesPerDoc()
+		{
+			PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction());
+			TopDocs hits = searcher.Search(query, null, 100);
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			Assert.IsTrue(hits.totalHits == 100, "hits Size: " + hits.totalHits + " is not: " + 100);
+			
+			//they should all have the exact same score, because they all contain seventy once, and we set
+			//all the other similarity factors to be 1
+			
+			Assert.IsTrue(hits.GetMaxScore() == 4.0, hits.GetMaxScore() + " does not equal: " + 4.0);
+			//there should be exactly 10 items that score a 4, all the rest should score a 2
+			//The 10 items are: 70 + i*100 where i in [0-9]
+			int numTens = 0;
+			for (int i = 0; i < hits.scoreDocs.Length; i++)
+			{
+				ScoreDoc doc = hits.scoreDocs[i];
+				if (doc.doc % 10 == 0)
+				{
+					numTens++;
+					Assert.IsTrue(doc.score == 4.0, doc.score + " does not equal: " + 4.0);
+				}
+				else
+				{
+					Assert.IsTrue(doc.score == 2, doc.score + " does not equal: " + 2);
+				}
+			}
+			Assert.IsTrue(numTens == 10, numTens + " does not equal: " + 10);
+			CheckHits.CheckExplanations(query, "field", searcher, true);
+			Lucene.Net.Search.Spans.Spans spans = query.GetSpans(searcher.GetIndexReader());
+			Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+			Assert.IsTrue(spans is TermSpans, "spans is not an instanceof " + typeof(TermSpans));
+			//should be two matches per document
+			int count = 0;
+			//100 hits times 2 matches per hit, we should have 200 in count
+			while (spans.Next())
+			{
+				count++;
+			}
+			Assert.IsTrue(count == 200, count + " does not equal: " + 200);
+		}
+		
+		//Set includeSpanScore to false, in which case just the payload score comes through.
+        [Test]
+		public virtual void  TestIgnoreSpanScorer()
+		{
+			PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction(), false);
+			
+			IndexSearcher theSearcher = new IndexSearcher(directory, true);
+			theSearcher.SetSimilarity(new FullSimilarity());
+			// FIX: search with theSearcher (FullSimilarity); the original searched with
+			// 'searcher', leaving theSearcher unused so this test never exercised the
+			// includeSpanScore=false path under a non-neutralized similarity.  With
+			// includeSpanScore false, only the payload score contributes, so the
+			// expectations match TestMultipleMatchesPerDoc.
+			TopDocs hits = theSearcher.Search(query, null, 100);
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			Assert.IsTrue(hits.totalHits == 100, "hits Size: " + hits.totalHits + " is not: " + 100);
+			
+			Assert.IsTrue(hits.GetMaxScore() == 4.0, hits.GetMaxScore() + " does not equal: " + 4.0);
+			//there should be exactly 10 items that score a 4, all the rest should score a 2
+			//The 10 items are: 70 + i*100 where i in [0-9]
+			int numTens = 0;
+			for (int i = 0; i < hits.scoreDocs.Length; i++)
+			{
+				ScoreDoc doc = hits.scoreDocs[i];
+				if (doc.doc % 10 == 0)
+				{
+					numTens++;
+					Assert.IsTrue(doc.score == 4.0, doc.score + " does not equal: " + 4.0);
+				}
+				else
+				{
+					Assert.IsTrue(doc.score == 2, doc.score + " does not equal: " + 2);
+				}
+			}
+			Assert.IsTrue(numTens == 10, numTens + " does not equal: " + 10);
+			CheckHits.CheckExplanations(query, "field", searcher, true);
+			Lucene.Net.Search.Spans.Spans spans = query.GetSpans(searcher.GetIndexReader());
+			Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
+			Assert.IsTrue(spans is TermSpans, "spans is not an instanceof " + typeof(TermSpans));
+			//should be two matches per document
+			int count = 0;
+			//100 hits times 2 matches per hit, we should have 200 in count
+			while (spans.Next())
+			{
+				count++;
+			}
+			// FIX: the original counted the spans but never asserted the total.
+			Assert.IsTrue(count == 200, count + " does not equal: " + 200);
+			// FIX: close the extra searcher opened above (the original leaked it).
+			theSearcher.Close();
+		}
+		
+		// A term that occurs in no document produces zero hits.
+        [Test]
+		public virtual void  TestNoMatch()
+		{
+			PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.FIELD, "junk"), new MaxPayloadFunction());
+			TopDocs hits = searcher.Search(query, null, 100);
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			Assert.IsTrue(hits.totalHits == 0, "hits Size: " + hits.totalHits + " is not: " + 0);
+		}
+		
+		// PayloadTermQuery on a field indexed without payloads must still match;
+		// MUST "zero" and MUST_NOT "foo" should leave exactly doc 0.
+        [Test]
+		public virtual void  TestNoPayload()
+		{
+			PayloadTermQuery q1 = new PayloadTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "zero"), new MaxPayloadFunction());
+			PayloadTermQuery q2 = new PayloadTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "foo"), new MaxPayloadFunction());
+			BooleanClause c1 = new BooleanClause(q1, BooleanClause.Occur.MUST);
+			BooleanClause c2 = new BooleanClause(q2, BooleanClause.Occur.MUST_NOT);
+			BooleanQuery query = new BooleanQuery();
+			query.Add(c1);
+			query.Add(c2);
+			TopDocs hits = searcher.Search(query, null, 100);
+			Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
+			Assert.IsTrue(hits.totalHits == 1, "hits Size: " + hits.totalHits + " is not: " + 1);
+			int[] results = new int[1];
+			results[0] = 0; //hits.scoreDocs[0].doc;
+			CheckHits.CheckHitCollector(query, PayloadHelper.NO_PAYLOAD_FIELD, searcher, results);
+		}
+		
+		// must be static for weight serialization tests 
+		[Serializable]
+		internal class BoostingSimilarity:DefaultSimilarity
+		{
+			
+			// TODO: Remove warning after API has been finalized
+			public override float ScorePayload(int docId, System.String fieldName, int start, int end, byte[] payload, int offset, int length)
+			{
+				// The test payloads are a single byte, so ignore offset/length and
+				// surface that byte directly as the payload score.
+				return payload[0];
+			}
+			
+			//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+			//Make everything else 1 so we see the effect of the payload
+			//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+			public override float LengthNorm(System.String fieldName, int numTerms)
+			{
+				return 1;
+			}
+			
+			public override float QueryNorm(float sumOfSquaredWeights)
+			{
+				return 1;
+			}
+			
+			public override float SloppyFreq(int distance)
+			{
+				return 1;
+			}
+			
+			public override float Coord(int overlap, int maxOverlap)
+			{
+				return 1;
+			}
+			
+			public override float Idf(int docFreq, int numDocs)
+			{
+				return 1;
+			}
+			
+			public override float Tf(float freq)
+			{
+				return freq == 0?0:1;
+			}
+		}
+		
+		// A similarity that leaves everything at the defaults except the payload score.
+		[Serializable]
+		internal class FullSimilarity:DefaultSimilarity
+		{
+			// FIX: the original declared 'public virtual float ScorePayload(int, String,
+			// byte[], int, int)', which does not match DefaultSimilarity.ScorePayload and
+			// therefore never overrode it (the base would have returned 1).  Use the real
+			// override signature (see BoostingSimilarity above) so the payload byte is
+			// actually returned as the payload score.
+			public override float ScorePayload(int docId, System.String fieldName, int start, int end, byte[] payload, int offset, int length)
+			{
+				// The test payloads are a single byte; ignore offset/length.
+				return payload[0];
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/QueryUtils.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,24 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using MultiReader = Lucene.Net.Index.MultiReader;
+using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using ReaderUtil = Lucene.Net.Util.ReaderUtil;
+
 namespace Lucene.Net.Search
 {
+	
+	
+	
 	public class QueryUtils
 	{
 		[Serializable]
-		private class AnonymousClassQuery : Query
+		private class AnonymousClassQuery:Query
 		{
 			public override System.String ToString(System.String field)
 			{
@@ -32,13 +44,13 @@
 			}
 			override public System.Object Clone()
 			{
+                System.Diagnostics.Debug.Fail("Port issue:", "Do we need QueryUtils.AnonymousClassQuery.Clone()?");
 				return null;
 			}
 		}
-
-		private class AnonymousClassHitCollector : HitCollector
+		private class AnonymousClassCollector:Collector
 		{
-			public AnonymousClassHitCollector(int[] order, int[] opidx, int skip_op, Lucene.Net.Search.Scorer scorer, int[] sdoc, float maxDiff, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s)
+			public AnonymousClassCollector(int[] order, int[] opidx, int skip_op, Lucene.Net.Search.Scorer scorer, int[] sdoc, float maxDiff, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s)
 			{
 				InitBlock(order, opidx, skip_op, scorer, sdoc, maxDiff, q, s);
 			}
@@ -61,14 +73,25 @@
 			private float maxDiff;
 			private Lucene.Net.Search.Query q;
 			private Lucene.Net.Search.IndexSearcher s;
-			public override void  Collect(int doc, float score)
+			private int base_Renamed = 0;
+			private Scorer sc;
+			
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.sc = scorer;
+			}
+			
+			public override void  Collect(int doc)
 			{
+				doc = doc + base_Renamed;
+				float score = sc.Score();
 				try
 				{
 					int op = order[(opidx[0]++) % order.Length];
-					//System.out.println(op==skip_op ? "skip("+(sdoc[0]+1)+")":"next()");
-					bool more = op == skip_op?scorer.SkipTo(sdoc[0] + 1):scorer.Next();
-					sdoc[0] = scorer.Doc();
+					// System.out.println(op==skip_op ?
+					// "skip("+(sdoc[0]+1)+")":"next()");
+					bool more = op == skip_op?scorer.Advance(sdoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS:scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
+					sdoc[0] = scorer.DocID();
 					float scorerScore = scorer.Score();
 					float scorerScore2 = scorer.Score();
 					float scoreDiff = System.Math.Abs(score - scorerScore);
@@ -83,14 +106,23 @@
 				}
 				catch (System.IO.IOException e)
 				{
-					throw new System.Exception("", e);
+					throw new System.SystemException("", e);
 				}
 			}
+			
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				base_Renamed = docBase;
+			}
+			
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
+			}
 		}
-		
-		private class AnonymousClassHitCollector1 : HitCollector
+		private class AnonymousClassCollector1:Collector
 		{
-			public AnonymousClassHitCollector1(int[] lastDoc, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s, float maxDiff)
+			public AnonymousClassCollector1(int[] lastDoc, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s, float maxDiff)
 			{
 				InitBlock(lastDoc, q, s, maxDiff);
 			}
@@ -105,17 +137,25 @@
 			private Lucene.Net.Search.Query q;
 			private Lucene.Net.Search.IndexSearcher s;
 			private float maxDiff;
-			public override void  Collect(int doc, float score)
+			private Scorer scorer;
+			private IndexReader reader;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
 			{
 				//System.out.println("doc="+doc);
+				float score = this.scorer.Score();
 				try
 				{
+					
 					for (int i = lastDoc[0] + 1; i <= doc; i++)
 					{
 						Weight w = q.Weight(s);
-						Scorer scorer = w.Scorer(s.GetIndexReader());
-						Assert.IsTrue(scorer.SkipTo(i), "query collected " + doc + " but skipTo(" + i + ") says no more docs!");
-						Assert.AreEqual(doc, scorer.Doc(), "query collected " + doc + " but skipTo(" + i + ") got to " + scorer.Doc());
+						Scorer scorer = w.Scorer(reader, true, false);
+						Assert.IsTrue(scorer.Advance(i) != DocIdSetIterator.NO_MORE_DOCS, "query collected " + doc + " but skipTo(" + i + ") says no more docs!");
+						Assert.AreEqual(doc, scorer.DocID(), "query collected " + doc + " but skipTo(" + i + ") got to " + scorer.DocID());
 						float skipToScore = scorer.Score();
 						Assert.AreEqual(skipToScore, scorer.Score(), maxDiff, "unstable skipTo(" + i + ") score!");
 						Assert.AreEqual(score, skipToScore, maxDiff, "query assigned doc " + doc + " a score of <" + score + "> but skipTo(" + i + ") has <" + skipToScore + ">!");
@@ -124,9 +164,18 @@
 				}
 				catch (System.IO.IOException e)
 				{
-					throw new System.Exception("", e);
+					throw new System.SystemException("", e);
 				}
 			}
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				this.reader = reader;
+				lastDoc[0] = - 1;
+			}
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return false;
+			}
 		}
 		
 		/// <summary>Check the types of things query objects should be able to do. </summary>
@@ -154,14 +203,14 @@
 		
 		public static void  CheckEqual(Query q1, Query q2)
 		{
-			Assert.AreEqual(q1.ToString(), q2.ToString());
+			Assert.AreEqual(q1, q2);
 			Assert.AreEqual(q1.GetHashCode(), q2.GetHashCode());
 		}
 		
 		public static void  CheckUnequal(Query q1, Query q2)
 		{
-			Assert.IsTrue(q1.ToString() != q2.ToString());
-			Assert.IsTrue(q2.ToString() != q1.ToString());
+			Assert.IsTrue(!q1.Equals(q2));
+			Assert.IsTrue(!q2.Equals(q1));
 			
 			// possible this test can fail on a hash collision... if that
 			// happens, please change test to use a different example.
@@ -174,15 +223,28 @@
 			CheckHits.CheckExplanations(q, null, s, true);
 		}
 		
-		/// <summary> various query sanity checks on a searcher, including explanation checks.</summary>
-		/// <seealso cref="checkExplanations">
+		/// <summary> Various query sanity checks on a searcher, some checks are only done for
+		/// instanceof IndexSearcher.
+		/// 
+		/// </summary>
+		/// <seealso cref="Check(Query)">
+		/// </seealso>
+		/// <seealso cref="checkFirstSkipTo">
 		/// </seealso>
 		/// <seealso cref="checkSkipTo">
 		/// </seealso>
-		/// <seealso cref="Check(Query)">
+		/// <seealso cref="checkExplanations">
+		/// </seealso>
+		/// <seealso cref="checkSerialization">
+		/// </seealso>
+		/// <seealso cref="checkEqual">
 		/// </seealso>
 		public static void  Check(Query q1, Searcher s)
 		{
+			Check(q1, s, true);
+		}
+		private static void  Check(Query q1, Searcher s, bool wrap)
+		{
 			try
 			{
 				Check(q1);
@@ -193,17 +255,100 @@
 						IndexSearcher is_Renamed = (IndexSearcher) s;
 						CheckFirstSkipTo(q1, is_Renamed);
 						CheckSkipTo(q1, is_Renamed);
+						if (wrap)
+						{
+							Check(q1, WrapUnderlyingReader(is_Renamed, - 1), false);
+							Check(q1, WrapUnderlyingReader(is_Renamed, 0), false);
+							Check(q1, WrapUnderlyingReader(is_Renamed, + 1), false);
+						}
+					}
+					if (wrap)
+					{
+						Check(q1, WrapSearcher(s, - 1), false);
+						Check(q1, WrapSearcher(s, 0), false);
+						Check(q1, WrapSearcher(s, + 1), false);
 					}
 					CheckExplanations(q1, s);
 					CheckSerialization(q1, s);
+					
+					Query q2 = (Query) q1.Clone();
+					CheckEqual(s.Rewrite(q1), s.Rewrite(q2));
 				}
 			}
 			catch (System.IO.IOException e)
 			{
-				throw new System.Exception("", e);
+				throw new System.SystemException("", e);
+			}
+		}
+		
+		/// <summary> Given an IndexSearcher, returns a new IndexSearcher whose IndexReader 
+		/// is a MultiReader containing the Reader of the original IndexSearcher, 
+		/// as well as several "empty" IndexReaders -- some of which will have 
+		/// deleted documents in them.  This new IndexSearcher should 
+		/// behave exactly the same as the original IndexSearcher.
+		/// </summary>
+		/// <param name="s">the searcher to wrap
+		/// </param>
+		/// <param name="edge">if negative, s will be the first sub; if 0, s will be in the middle, if positive s will be the last sub
+		/// </param>
+		public static IndexSearcher WrapUnderlyingReader(IndexSearcher s, int edge)
+		{
+			
+			IndexReader r = s.GetIndexReader();
+			
+			// we can't put deleted docs before the nested reader, because
+			// it will throw off the docIds
+			IndexReader[] readers = new IndexReader[]{edge < 0?r:IndexReader.Open(MakeEmptyIndex(0)), IndexReader.Open(MakeEmptyIndex(0)), new MultiReader(new IndexReader[]{IndexReader.Open(MakeEmptyIndex(edge < 0?4:0)), IndexReader.Open(MakeEmptyIndex(0)), 0 == edge?r:IndexReader.Open(MakeEmptyIndex(0))}), IndexReader.Open(MakeEmptyIndex(0 < edge?0:7)), IndexReader.Open(MakeEmptyIndex(0)), new MultiReader(new IndexReader[]{IndexReader.Open(MakeEmptyIndex(0 < edge?0:5)), IndexReader.Open(MakeEmptyIndex(0)), 0 < edge?r:IndexReader.Open(MakeEmptyIndex(0))})};
+			IndexSearcher out_Renamed = new IndexSearcher(new MultiReader(readers));
+			out_Renamed.SetSimilarity(s.GetSimilarity());
+			return out_Renamed;
+		}
+		/// <summary> Given a Searcher, returns a new MultiSearcher wrapping the  
+		/// the original Searcher, 
+		/// as well as several "empty" IndexSearchers -- some of which will have
+		/// deleted documents in them.  This new MultiSearcher 
+		/// should behave exactly the same as the original Searcher.
+		/// </summary>
+		/// <param name="s">the Searcher to wrap
+		/// </param>
+		/// <param name="edge">if negative, s will be the first sub; if 0, s will be in the middle, if positive s will be the last sub
+		/// </param>
+		public static MultiSearcher WrapSearcher(Searcher s, int edge)
+		{
+			
+			// we can't put deleted docs before the nested reader, because
+			// it will throw off the docIds
+			Searcher[] searchers = new Searcher[]{edge < 0?s:new IndexSearcher(MakeEmptyIndex(0)), new MultiSearcher(new Searcher[]{new IndexSearcher(MakeEmptyIndex(edge < 0?65:0)), new IndexSearcher(MakeEmptyIndex(0)), 0 == edge?s:new IndexSearcher(MakeEmptyIndex(0))}), new IndexSearcher(MakeEmptyIndex(0 < edge?0:3)), new IndexSearcher(MakeEmptyIndex(0)), new MultiSearcher(new Searcher[]{new IndexSearcher(MakeEmptyIndex(0 < edge?0:5)), new IndexSearcher(MakeEmptyIndex(0)), 0 < edge?s:new IndexSearcher(MakeEmptyIndex(0))})};
+			MultiSearcher out_Renamed = new MultiSearcher(searchers);
+			out_Renamed.SetSimilarity(s.GetSimilarity());
+			return out_Renamed;
+		}
+		
+		private static RAMDirectory MakeEmptyIndex(int numDeletedDocs)
+		{
+			RAMDirectory d = new RAMDirectory();
+			IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(), true, MaxFieldLength.LIMITED);
+			for (int i = 0; i < numDeletedDocs; i++)
+			{
+				w.AddDocument(new Document());
 			}
+			w.Commit();
+			w.DeleteDocuments(new MatchAllDocsQuery());
+			w.Commit();
+			
+			if (0 < numDeletedDocs)
+				Assert.IsTrue(w.HasDeletions(), "writer has no deletions");
+			
+			Assert.AreEqual(numDeletedDocs, w.MaxDoc(), "writer is missing some deleted docs");
+			Assert.AreEqual(0, w.NumDocs(), "writer has non-deleted docs");
+			w.Close();
+			IndexReader r = IndexReader.Open(d);
+			Assert.AreEqual(numDeletedDocs, r.NumDeletedDocs(), "reader has wrong number of deleted docs");
+			r.Close();
+			return d;
 		}
 		
+		
 		/// <summary>check that the query weight is serializable. </summary>
 		/// <throws>  IOException if serialization check fail.  </throws>
 		private static void  CheckSerialization(Query q, Searcher s)
@@ -213,15 +358,16 @@
 			{
 				System.IO.MemoryStream bos = new System.IO.MemoryStream();
 				System.IO.BinaryWriter oos = new System.IO.BinaryWriter(bos);
-				System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
-				formatter.Serialize(oos.BaseStream, w);
+		        System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
+		        formatter.Serialize(oos.BaseStream, w);
 				oos.Close();
 				System.IO.BinaryReader ois = new System.IO.BinaryReader(new System.IO.MemoryStream(bos.ToArray()));
-				formatter.Deserialize(ois.BaseStream);
+		        formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
+		        formatter.Deserialize(ois.BaseStream);
 				ois.Close();
 				
-				//skip rquals() test for now - most weights don't overide equals() and we won't add this just for the tests.
-				//TestCase.assertEquals("writeObject(w) != w.  ("+w+")",w2,w);   
+				//skip equals() test for now - most weights don't override equals() and we won't add this just for the tests.
+                //TestCase.Assert.AreEqual(w2,w,"writeObject(w) != w.  ("+w+")");   
 			}
 			catch (System.Exception e)
 			{
@@ -246,23 +392,30 @@
 			int[][] orders = new int[][]{new int[]{next_op}, new int[]{skip_op}, new int[]{skip_op, next_op}, new int[]{next_op, skip_op}, new int[]{skip_op, skip_op, next_op, next_op}, new int[]{next_op, next_op, skip_op, skip_op}, new int[]{skip_op, skip_op, skip_op, next_op, next_op}};
 			for (int k = 0; k < orders.Length; k++)
 			{
+				
 				int[] order = orders[k];
-				//System.out.print("Order:");for (int i = 0; i < order.length; i++) System.out.print(order[i]==skip_op ? " skip()":" next()"); System.out.println();
+				// System.out.print("Order:");for (int i = 0; i < order.length; i++)
+				// System.out.print(order[i]==skip_op ? " skip()":" next()");
+				// System.out.println();
 				int[] opidx = new int[]{0};
 				
 				Weight w = q.Weight(s);
-				Scorer scorer = w.Scorer(s.GetIndexReader());
+				Scorer scorer = w.Scorer(s.GetIndexReader(), true, false);
+				if (scorer == null)
+				{
+					continue;
+				}
 				
 				// FUTURE: ensure scorer.doc()==-1
 				
 				int[] sdoc = new int[]{- 1};
 				float maxDiff = 1e-5f;
-				s.Search(q, new AnonymousClassHitCollector(order, opidx, skip_op, scorer, sdoc, maxDiff, q, s));
+				s.Search(q, new AnonymousClassCollector(order, opidx, skip_op, scorer, sdoc, maxDiff, q, s));
 				
 				// make sure next call to scorer is false.
 				int op = order[(opidx[0]++) % order.Length];
-				//System.out.println(op==skip_op ? "last: skip()":"last: next()");
-				bool more = op == skip_op?scorer.SkipTo(sdoc[0] + 1):scorer.Next();
+				// System.out.println(op==skip_op ? "last: skip()":"last: next()");
+				bool more = (op == skip_op?scorer.Advance(sdoc[0] + 1):scorer.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS;
 				Assert.IsFalse(more);
 			}
 		}
@@ -273,12 +426,25 @@
 			//System.out.println("checkFirstSkipTo: "+q);
 			float maxDiff = 1e-5f;
 			int[] lastDoc = new int[]{- 1};
-			s.Search(q, new AnonymousClassHitCollector1(lastDoc, q, s, maxDiff));
-			Weight w = q.Weight(s);
-			Scorer scorer = w.Scorer(s.GetIndexReader());
-			bool more = scorer.SkipTo(lastDoc[0] + 1);
-			if (more)
-				Assert.IsFalse(more, "query's last doc was " + lastDoc[0] + " but skipTo(" + (lastDoc[0] + 1) + ") got to " + scorer.Doc());
+			s.Search(q, new AnonymousClassCollector1(lastDoc, q, s, maxDiff));
+			
+			System.Collections.IList readerList = new System.Collections.ArrayList();
+			ReaderUtil.GatherSubReaders(readerList, s.GetIndexReader());
+			IndexReader[] readers = (IndexReader[])(new System.Collections.ArrayList(readerList).ToArray(typeof(IndexReader)));
+			for (int i = 0; i < readers.Length; i++)
+			{
+				IndexReader reader = readers[i];
+				Weight w = q.Weight(s);
+				Scorer scorer = w.Scorer(reader, true, false);
+				
+				if (scorer != null)
+				{
+					bool more = scorer.Advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
+					
+					if (more && lastDoc[0] != - 1)
+						Assert.IsFalse(more, "query's last doc was " + lastDoc[0] + " but skipTo(" + (lastDoc[0] + 1) + ") got to " + scorer.DocID());
+				}
+			}
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/SampleComparable.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/SampleComparable.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/SampleComparable.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/SampleComparable.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -17,10 +17,13 @@
 
 using System;
 
+using NUnit.Framework;
+
 using IndexReader = Lucene.Net.Index.IndexReader;
 using Term = Lucene.Net.Index.Term;
 using TermDocs = Lucene.Net.Index.TermDocs;
 using TermEnum = Lucene.Net.Index.TermEnum;
+using StringHelper = Lucene.Net.Util.StringHelper;
 
 namespace Lucene.Net.Search
 {
@@ -43,7 +46,7 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Id: SampleComparable.java 564236 2007-08-09 15:21:19Z gsingers $
+	/// <version>  $Id: SampleComparable.java 801344 2009-08-05 18:05:06Z yonik $
 	/// </version>
 	/// <since> 1.4
 	/// </since>
@@ -55,20 +58,20 @@
 		{
 			private class AnonymousClassScoreDocComparator : ScoreDocComparator
 			{
-				public AnonymousClassScoreDocComparator(IndexReader reader, TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
+				public AnonymousClassScoreDocComparator(Lucene.Net.Index.IndexReader reader, Lucene.Net.Index.TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
 				{
 					InitBlock(reader, enumerator, field, enclosingInstance);
 				}
-				private void  InitBlock(IndexReader reader, TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
+				private void  InitBlock(Lucene.Net.Index.IndexReader reader, Lucene.Net.Index.TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
 				{
 					this.reader = reader;
 					this.enumerator = enumerator;
 					this.field = field;
 					this.enclosingInstance = enclosingInstance;
-					cachedValues = FillCache(reader, enumerator, field);
+					cachedValues = Enclosing_Instance.fillCache(reader, enumerator, field);
 				}
-				private IndexReader reader;
-				private TermEnum enumerator;
+				private Lucene.Net.Index.IndexReader reader;
+				private Lucene.Net.Index.TermEnum enumerator;
 				private System.String field;
 				private AnonymousClassSortComparatorSource enclosingInstance;
 				public AnonymousClassSortComparatorSource Enclosing_Instance
@@ -79,7 +82,6 @@
 					}
 					
 				}
-
 				protected internal System.IComparable[] cachedValues;
 				
 				public virtual int Compare(ScoreDoc i, ScoreDoc j)
@@ -99,7 +101,7 @@
 			}
 			public virtual ScoreDocComparator NewComparator(IndexReader reader, System.String fieldname)
 			{
-				System.String field = String.Intern(fieldname);
+				System.String field = StringHelper.Intern(fieldname);
 				TermEnum enumerator = reader.Terms(new Term(fieldname, ""));
 				try
 				{
@@ -124,9 +126,9 @@
 			/// <returns> Array of objects representing natural order of terms in field.
 			/// </returns>
 			/// <throws>  IOException If an error occurs reading the index. </throws>
-			public static System.IComparable[] FillCache(IndexReader reader, TermEnum enumerator, System.String fieldname)
+			protected internal virtual System.IComparable[] fillCache(IndexReader reader, TermEnum enumerator, System.String fieldname)
 			{
-				System.String field = String.Intern(fieldname);
+				System.String field = StringHelper.Intern(fieldname);
 				System.IComparable[] retArray = new System.IComparable[reader.MaxDoc()];
 				if (retArray.Length > 0)
 				{
@@ -140,7 +142,7 @@
 						do 
 						{
 							Term term = enumerator.Term();
-							if (term.Field() != field)
+							if ((System.Object) term.Field() != (System.Object) field)
 								break;
 							System.IComparable termval = GetComparable(term.Text());
 							termDocs.Seek(enumerator);
@@ -159,29 +161,11 @@
 				return retArray;
 			}
 			
-			internal static System.IComparable GetComparable(System.String termtext)
-			{
-				return new SampleComparable(termtext);
-			}
-		}
-		[Serializable]
-		private class AnonymousClassSortComparator : SortComparator
-		{
-			public override System.IComparable GetComparable(System.String termtext)
+			internal virtual System.IComparable GetComparable(System.String termtext)
 			{
 				return new SampleComparable(termtext);
 			}
 		}
-
-		public static SortComparatorSource GetComparatorSource()
-		{
-			return new AnonymousClassSortComparatorSource();
-		}
-
-		public static SortComparator GetComparator()
-		{
-			return new AnonymousClassSortComparator();
-		}
 		
 		internal System.String string_part;
 		internal System.Int32 int_part;
@@ -203,5 +187,33 @@
 			}
 			return i;
 		}
+		
+		public static SortComparatorSource GetComparatorSource()
+		{
+			return new AnonymousClassSortComparatorSource();
+		}
+		
+		[Serializable]
+		private sealed class InnerSortComparator:SortComparator
+		{
+			public /*protected internal*/ override System.IComparable GetComparable(System.String termtext)
+			{
+				return new SampleComparable(termtext);
+			}
+			public override int GetHashCode()
+			{
+				return this.GetType().FullName.GetHashCode();
+			}
+			public  override bool Equals(System.Object that)
+			{
+				return this.GetType().Equals(that.GetType());
+			}
+		}
+		
+		
+		public static SortComparator GetComparator()
+		{
+			return new InnerSortComparator();
+		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/SingleDocTestFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/SingleDocTestFilter.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/SingleDocTestFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/SingleDocTestFilter.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -17,6 +17,8 @@
 
 using System;
 
+using NUnit.Framework;
+
 using IndexReader = Lucene.Net.Index.IndexReader;
 using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
 
@@ -24,7 +26,7 @@
 {
 	
 	[Serializable]
-	public class SingleDocTestFilter : Lucene.Net.Search.Filter
+	public class SingleDocTestFilter:Filter
 	{
 		private int doc;
 		
@@ -32,31 +34,12 @@
 		{
 			this.doc = doc;
 		}
-
-        public override DocIdSet GetDocIdSet(IndexReader reader)
-        {
-            System.Collections.BitArray bits = new System.Collections.BitArray((reader.MaxDoc() % 64 == 0 ? reader.MaxDoc() / 64 : reader.MaxDoc() / 64 + 1) * 64);
-
-            for (int increment = 0; doc >= bits.Length; increment = +64)
-            {
-                bits.Length += increment;
-            }
-            bits.Set(doc, true);
-
-            return new DocIdBitSet(bits);
-        }
-        [System.Obsolete()]
-        public override System.Collections.BitArray Bits(IndexReader reader)
-        {
-            System.Collections.BitArray bits = new System.Collections.BitArray((reader.MaxDoc() % 64 == 0 ? reader.MaxDoc() / 64 : reader.MaxDoc() / 64 + 1) * 64);
-
-            for (int increment = 0; doc >= bits.Length; increment = +64)
-            {
-                bits.Length += increment;
-            }
-            bits.Set(doc, true);
-
-            return bits;
-        }
+		
+		public override DocIdSet GetDocIdSet(IndexReader reader)
+		{
+			System.Collections.BitArray bits = new System.Collections.BitArray((reader.MaxDoc() % 64 == 0?reader.MaxDoc() / 64:reader.MaxDoc() / 64 + 1) * 64);
+			bits.Set(doc, true);
+			return new DocIdBitSet(bits);
+		}
 	}
 }
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Search/Spans/JustCompileSearchSpans.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Spans/JustCompileSearchSpans.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Spans/JustCompileSearchSpans.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Spans/JustCompileSearchSpans.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,158 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using IndexReader = Lucene.Net.Index.IndexReader;
+using Similarity = Lucene.Net.Search.Similarity;
+using Weight = Lucene.Net.Search.Weight;
+
+namespace Lucene.Net.Search.Spans
+{
+	
+	/// <summary> Holds all implementations of classes in the o.a.l.s.spans package as a
+	/// back-compatibility test. It does not run any tests per-se, however if
+	/// someone adds a method to an interface or abstract method to an abstract
+	/// class, one of the implementations here will fail to compile and so we know
+	/// back-compat policy was violated.
+	/// </summary>
+	sealed class JustCompileSearchSpans
+	{
+		
+		private const System.String UNSUPPORTED_MSG = "unsupported: used for back-compat testing only !";
+		
+		internal sealed class JustCompileSpans:Spans
+		{
+			
+			public override int Doc()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override int End()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override bool Next()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override bool SkipTo(int target)
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override int Start()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override System.Collections.ICollection GetPayload()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override bool IsPayloadAvailable()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+		}
+		
+		[Serializable]
+		internal sealed class JustCompileSpanQuery:SpanQuery
+		{
+			
+			public override System.String GetField()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override Spans GetSpans(IndexReader reader)
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			/// <deprecated> delete in 3.0. 
+			/// </deprecated>
+			public override System.Collections.ICollection GetTerms()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override System.String ToString(System.String field)
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+		}
+		
+		internal sealed class JustCompilePayloadSpans:Spans
+		{
+			
+			public override System.Collections.ICollection GetPayload()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override bool IsPayloadAvailable()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override int Doc()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override int End()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override bool Next()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override bool SkipTo(int target)
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+			
+			public override int Start()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+		}
+		
+		internal sealed class JustCompileSpanScorer:SpanScorer
+		{
+			
+			internal JustCompileSpanScorer(Spans spans, Weight weight, Similarity similarity, byte[] norms):base(spans, weight, similarity, norms)
+			{
+			}
+			
+			public /*protected internal*/ override bool SetFreqCurrentDoc()
+			{
+				throw new System.NotSupportedException(Lucene.Net.Search.Spans.JustCompileSearchSpans.UNSUPPORTED_MSG);
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestBasics.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Spans/TestBasics.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestBasics.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestBasics.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,14 +19,20 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Lucene.Net.Search;
-using Searchable = Lucene.Net.Search.Searchable;
+using BooleanClause = Lucene.Net.Search.BooleanClause;
+using BooleanQuery = Lucene.Net.Search.BooleanQuery;
+using CheckHits = Lucene.Net.Search.CheckHits;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+using Query = Lucene.Net.Search.Query;
+using QueryUtils = Lucene.Net.Search.QueryUtils;
+using TermQuery = Lucene.Net.Search.TermQuery;
 using English = Lucene.Net.Util.English;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
@@ -44,15 +50,13 @@
 	/// testing of the indexing and search code.
 	/// 
 	/// </summary>
-	/// <author>  Doug Cutting
-	/// </author>
-	[TestFixture]
-	public class TestBasics : LuceneTestCase
+    [TestFixture]
+	public class TestBasics:LuceneTestCase
 	{
 		private IndexSearcher searcher;
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			RAMDirectory directory = new RAMDirectory();
@@ -60,7 +64,7 @@
 			//writer.infoStream = System.out;
 			for (int i = 0; i < 1000; i++)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				doc.Add(new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
@@ -88,8 +92,8 @@
 		public virtual void  TestPhrase()
 		{
 			PhraseQuery query = new PhraseQuery();
-			query.Add(new Term("field", "seventy"));
-			query.Add(new Term("field", "seven"));
+			query.add(new Term("field", "seventy"));
+			query.add(new Term("field", "seven"));
 			CheckHits(query, new int[]{77, 177, 277, 377, 477, 577, 677, 777, 877, 977});
 		}
 		
@@ -97,8 +101,8 @@
 		public virtual void  TestPhrase2()
 		{
 			PhraseQuery query = new PhraseQuery();
-			query.Add(new Term("field", "seventish"));
-			query.Add(new Term("field", "sevenon"));
+			query.add(new Term("field", "seventish"));
+			query.add(new Term("field", "sevenon"));
 			CheckHits(query, new int[]{});
 		}
 		
@@ -142,9 +146,8 @@
 			SpanTermQuery term1 = new SpanTermQuery(new Term("field", "nine"));
 			SpanTermQuery term2 = new SpanTermQuery(new Term("field", "six"));
 			SpanNearQuery query = new SpanNearQuery(new SpanQuery[]{term1, term2}, 4, false);
-
-			//CheckHits(query, new int[] { 609, 629, 639, 649, 659, 669, 679, 689, 699, 906, 926, 936, 946, 956, 966, 976, 986, 996 });
-			CheckHits(query, new int[] { 609, 906, 629, 639, 649, 659, 669, 679, 689, 699, 926, 936, 946, 956, 966, 976, 986, 996 });
+			
+			CheckHits(query, new int[]{609, 629, 639, 649, 659, 669, 679, 689, 699, 906, 926, 936, 946, 956, 966, 976, 986, 996});
 		}
 		
 		[Test]
@@ -305,9 +308,8 @@
 			SpanOrQuery to2 = new SpanOrQuery(new SpanQuery[]{t5, t6});
 			
 			SpanNearQuery query = new SpanNearQuery(new SpanQuery[]{to1, to2}, 10, true);
-
-			//CheckHits(query, new int[] { 606, 607, 626, 627, 636, 637, 646, 647, 656, 657, 666, 667, 676, 677, 686, 687, 696, 697, 706, 707, 726, 727, 736, 737, 746, 747, 756, 757, 766, 767, 776, 777, 786, 787, 796, 797 });
-			CheckHits(query, new int[] { 606, 607, 706, 707, 626, 627, 636, 637, 646, 647, 656, 657, 666, 667, 676, 677, 686, 687, 696, 697, 726, 727, 736, 737, 746, 747, 756, 757, 766, 767, 776, 777, 786, 787, 796, 797 });
+			
+			CheckHits(query, new int[]{606, 607, 626, 627, 636, 637, 646, 647, 656, 657, 666, 667, 676, 677, 686, 687, 696, 697, 706, 707, 726, 727, 736, 737, 746, 747, 756, 757, 766, 767, 776, 777, 786, 787, 796, 797});
 		}
 		
 		[Test]
@@ -329,15 +331,58 @@
 			SpanOrQuery to2 = new SpanOrQuery(new SpanQuery[]{t5, t6});
 			
 			SpanNearQuery query = new SpanNearQuery(new SpanQuery[]{to1, to2}, 100, true);
-
-			//CheckHits(query, new int[] { 606, 607, 626, 627, 636, 637, 646, 647, 656, 657, 666, 667, 676, 677, 686, 687, 696, 697, 706, 707, 726, 727, 736, 737, 746, 747, 756, 757, 766, 767, 776, 777, 786, 787, 796, 797 });
-			CheckHits(query, new int[] { 606, 607, 706, 707, 626, 627, 636, 637, 646, 647, 656, 657, 666, 667, 676, 677, 686, 687, 696, 697, 726, 727, 736, 737, 746, 747, 756, 757, 766, 767, 776, 777, 786, 787, 796, 797 });
+			
+			CheckHits(query, new int[]{606, 607, 626, 627, 636, 637, 646, 647, 656, 657, 666, 667, 676, 677, 686, 687, 696, 697, 706, 707, 726, 727, 736, 737, 746, 747, 756, 757, 766, 767, 776, 777, 786, 787, 796, 797});
 		}
 		
+		[Test]
+		public virtual void  TestSpansSkipTo()
+		{
+			SpanTermQuery t1 = new SpanTermQuery(new Term("field", "seventy"));
+			SpanTermQuery t2 = new SpanTermQuery(new Term("field", "seventy"));
+			Spans s1 = t1.GetSpans(searcher.GetIndexReader());
+			Spans s2 = t2.GetSpans(searcher.GetIndexReader());
+			
+			Assert.IsTrue(s1.Next());
+			Assert.IsTrue(s2.Next());
+			
+			bool hasMore = true;
+			
+			do 
+			{
+				hasMore = SkipToAccoringToJavaDocs(s1, s1.Doc());
+				Assert.AreEqual(hasMore, s2.SkipTo(s2.Doc()));
+				Assert.AreEqual(s1.Doc(), s2.Doc());
+			}
+			while (hasMore);
+		}
+		
+		/// <summary>Skips to the first match beyond the current, whose document number is
+		/// greater than or equal to <i>target</i>. <p>Returns true iff there is such
+		/// a match.  <p>Behaves as if written: <pre>
+		/// boolean skipTo(int target) {
+		/// do {
+		/// if (!next())
+		/// return false;
+		/// } while (target > doc());
+		/// return true;
+		/// }
+		/// </pre>
+		/// </summary>
+		private bool SkipToAccoringToJavaDocs(Spans s, int target)
+		{
+			do 
+			{
+				if (!s.Next())
+					return false;
+			}
+			while (target > s.Doc());
+			return true;
+		}
 		
 		private void  CheckHits(Query query, int[] results)
 		{
-			Lucene.Net.Search.CheckHits.CheckHits_Renamed(query, "field", searcher, results);
+			Lucene.Net.Search.CheckHits.CheckHits_Renamed_Method(query, "field", searcher, results);
 		}
 	}
 }
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestFieldMaskingSpanQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Spans/TestFieldMaskingSpanQuery.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestFieldMaskingSpanQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestFieldMaskingSpanQuery.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,325 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using CheckHits = Lucene.Net.Search.CheckHits;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using Query = Lucene.Net.Search.Query;
+using QueryUtils = Lucene.Net.Search.QueryUtils;
+using Scorer = Lucene.Net.Search.Scorer;
+using Weight = Lucene.Net.Search.Weight;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search.Spans
+{
+	
+    [TestFixture]
+	public class TestFieldMaskingSpanQuery:LuceneTestCase
+	{
+		// Port of a Java anonymous SpanTermQuery subclass: its Rewrite discards
+		// the wrapped term and expands to first:sally OR first:james, letting
+		// TestRewrite1 verify that FieldMaskingSpanQuery rewrites its inner query.
+		[Serializable]
+		private class AnonymousClassSpanTermQuery:SpanTermQuery
+		{
+			private void  InitBlock(TestFieldMaskingSpanQuery enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			// Reference to the enclosing fixture (Java inner-class emulation).
+			private TestFieldMaskingSpanQuery enclosingInstance;
+			public TestFieldMaskingSpanQuery Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal AnonymousClassSpanTermQuery(TestFieldMaskingSpanQuery enclosingInstance, Lucene.Net.Index.Term Param1):base(Param1)
+			{
+				InitBlock(enclosingInstance);
+			}
+			// Rewrites to a fixed OR over two terms, ignoring the original term.
+			public override Query Rewrite(IndexReader reader)
+			{
+				return new SpanOrQuery(new SpanQuery[]{new SpanTermQuery(new Term("first", "sally")), new SpanTermQuery(new Term("first", "james"))});
+			}
+		}
+		
+		/// <summary>Builds a Document containing all of the given fields.</summary>
+		protected internal static Document Doc(Field[] fields)
+		{
+			Document doc = new Document();
+			for (int i = 0; i < fields.Length; i++)
+			{
+				doc.Add(fields[i]);
+			}
+			return doc;
+		}
+		
+		/// <summary>Factory for an unstored, analyzed field. (value_Renamed keeps
+		/// the Java port's rename of the keyword-like name 'value'.)</summary>
+		protected internal static Field Field(System.String name, System.String value_Renamed)
+		{
+			return new Field(name, value_Renamed, Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED);
+		}
+		
+		protected internal IndexSearcher searcher;
+		
+		/// <summary>Indexes five documents (ids 0-4) into a fresh RAMDirectory.
+		/// Each document holds one or more (gender, first, last) "person" entries
+		/// in parallel fields, so token positions line up across fields — the
+		/// property the field-masking tests below rely on.</summary>
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			RAMDirectory directory = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			writer.AddDocument(Doc(new Field[]{Field("id", "0"), Field("gender", "male"), Field("first", "james"), Field("last", "jones")}));
+			
+			writer.AddDocument(Doc(new Field[]{Field("id", "1"), Field("gender", "male"), Field("first", "james"), Field("last", "smith"), Field("gender", "female"), Field("first", "sally"), Field("last", "jones")}));
+			
+			writer.AddDocument(Doc(new Field[]{Field("id", "2"), Field("gender", "female"), Field("first", "greta"), Field("last", "jones"), Field("gender", "female"), Field("first", "sally"), Field("last", "smith"), Field("gender", "male"), Field("first", "james"), Field("last", "jones")}));
+			
+			writer.AddDocument(Doc(new Field[]{Field("id", "3"), Field("gender", "female"), Field("first", "lisa"), Field("last", "jones"), Field("gender", "male"), Field("first", "bob"), Field("last", "costas")}));
+			
+			writer.AddDocument(Doc(new Field[]{Field("id", "4"), Field("gender", "female"), Field("first", "sally"), Field("last", "smith"), Field("gender", "female"), Field("first", "linda"), Field("last", "dixit"), Field("gender", "male"), Field("first", "bubba"), Field("last", "jones")}));
+			
+			writer.Close();
+			searcher = new IndexSearcher(directory);
+		}
+		
+		/// <summary>Releases the per-test searcher opened in SetUp.</summary>
+		[TearDown]
+		public override void  TearDown()
+		{
+			// NOTE(review): base.TearDown() runs before the searcher is closed;
+			// order kept exactly as in the ported original.
+			base.TearDown();
+			searcher.Close();
+		}
+		
+		/// <summary>Runs the query and asserts it matches exactly the given doc ids.</summary>
+		protected internal virtual void  Check(SpanQuery q, int[] docs)
+		{
+			CheckHits.CheckHitCollector(q, null, searcher, docs);
+		}
+		
+		// A mask over an inner query that needs no rewriting should rewrite to
+		// an equal query (boost preserved) exposing exactly one term.
+        [Test]
+		public virtual void  TestRewrite0()
+		{
+			SpanQuery q = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+			q.SetBoost(8.7654321f);
+			SpanQuery qr = (SpanQuery) searcher.Rewrite(q);
+			
+			QueryUtils.CheckEqual(q, qr);
+			
+			Assert.AreEqual(1, qr.GetTerms().Count);
+		}
+		
+		// Masking an inner query that rewrites to something else (the anonymous
+		// class above expands to a two-term OR) must yield a *different* query
+		// after rewrite, exposing both expanded terms.
+        [Test]
+		public virtual void  TestRewrite1()
+		{
+			// mask an anon SpanQuery class that rewrites to something else.
+			SpanQuery q = new FieldMaskingSpanQuery(new AnonymousClassSpanTermQuery(this, new Term("last", "sally")), "first");
+			
+			SpanQuery qr = (SpanQuery) searcher.Rewrite(q);
+			
+			QueryUtils.CheckUnequal(q, qr);
+			
+			Assert.AreEqual(2, qr.GetTerms().Count);
+		}
+		
+		// A near query containing a same-field ("no-op") mask rewrites to an
+		// equal query, and term extraction still finds both underlying terms.
+        [Test]
+		public virtual void  TestRewrite2()
+		{
+			SpanQuery q1 = new SpanTermQuery(new Term("last", "smith"));
+			SpanQuery q2 = new SpanTermQuery(new Term("last", "jones"));
+			SpanQuery q = new SpanNearQuery(new SpanQuery[]{q1, new FieldMaskingSpanQuery(q2, "last")}, 1, true);
+			Query qr = searcher.Rewrite(q);
+			
+			QueryUtils.CheckEqual(q, qr);
+			
+			System.Collections.Hashtable set_Renamed = new System.Collections.Hashtable();
+			qr.ExtractTerms(set_Renamed);
+			Assert.AreEqual(2, set_Renamed.Count);
+		}
+		
+		// Equality of FieldMaskingSpanQuery must depend on the inner query, the
+		// masked field name, AND the boost — and on nothing else.
+        [Test]
+		public virtual void  TestEquality1()
+		{
+			SpanQuery q1 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+			SpanQuery q2 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+			SpanQuery q3 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "XXXXX");
+			SpanQuery q4 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "XXXXX")), "first");
+			SpanQuery q5 = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("xXXX", "sally")), "first");
+			QueryUtils.CheckEqual(q1, q2);
+			QueryUtils.CheckUnequal(q1, q3);
+			QueryUtils.CheckUnequal(q1, q4);
+			QueryUtils.CheckUnequal(q1, q5);
+			
+			// Boost participates in equality: unequal until both are boosted 9f.
+			SpanQuery qA = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+			qA.SetBoost(9f);
+			SpanQuery qB = new FieldMaskingSpanQuery(new SpanTermQuery(new Term("last", "sally")), "first");
+			QueryUtils.CheckUnequal(qA, qB);
+			qB.SetBoost(9f);
+			QueryUtils.CheckEqual(qA, qB);
+		}
+		
+		// Masking does not change *matching*: "sally" never occurs in the "last"
+		// field of any indexed document, so the masked query still hits nothing.
+        [Test]
+		public virtual void  TestNoop0()
+		{
+			SpanQuery q1 = new SpanTermQuery(new Term("last", "sally"));
+			SpanQuery q = new FieldMaskingSpanQuery(q1, "first");
+			Check(q, new int[]{});
+		}
+		// Masking a query with its own field is a no-op: the near query returns
+		// the same hits whether zero, one, or both clauses are "masked" as "last".
+        [Test]
+		public virtual void  TestNoop1()
+		{
+			SpanQuery q1 = new SpanTermQuery(new Term("last", "smith"));
+			SpanQuery q2 = new SpanTermQuery(new Term("last", "jones"));
+			SpanQuery q = new SpanNearQuery(new SpanQuery[]{q1, new FieldMaskingSpanQuery(q2, "last")}, 0, true);
+			Check(q, new int[]{1, 2});
+			q = new SpanNearQuery(new SpanQuery[]{new FieldMaskingSpanQuery(q1, "last"), new FieldMaskingSpanQuery(q2, "last")}, 0, true);
+			Check(q, new int[]{1, 2});
+		}
+		
+		// Cross-field near matching: masking first:james as "last" (or last:jones
+		// as "first") lets their positions be compared across fields; all four
+		// symmetric formulations must match the same docs {0, 2}.
+        [Test]
+		public virtual void  TestSimple1()
+		{
+			SpanQuery q1 = new SpanTermQuery(new Term("first", "james"));
+			SpanQuery q2 = new SpanTermQuery(new Term("last", "jones"));
+			SpanQuery q = new SpanNearQuery(new SpanQuery[]{q1, new FieldMaskingSpanQuery(q2, "first")}, - 1, false);
+			Check(q, new int[]{0, 2});
+			q = new SpanNearQuery(new SpanQuery[]{new FieldMaskingSpanQuery(q2, "first"), q1}, - 1, false);
+			Check(q, new int[]{0, 2});
+			q = new SpanNearQuery(new SpanQuery[]{q2, new FieldMaskingSpanQuery(q1, "last")}, - 1, false);
+			Check(q, new int[]{0, 2});
+			q = new SpanNearQuery(new SpanQuery[]{new FieldMaskingSpanQuery(q1, "last"), q2}, - 1, false);
+			Check(q, new int[]{0, 2});
+		}
+		
+		// Cross-field near matching with a third-party mask field: masking both
+		// clauses to "id" must give the same hits as masking one to "gender".
+        [Test]
+		public virtual void  TestSimple2()
+		{
+			SpanQuery q1 = new SpanTermQuery(new Term("gender", "female"));
+			SpanQuery q2 = new SpanTermQuery(new Term("last", "smith"));
+			SpanQuery q = new SpanNearQuery(new SpanQuery[]{q1, new FieldMaskingSpanQuery(q2, "gender")}, - 1, false);
+			Check(q, new int[]{2, 4});
+			q = new SpanNearQuery(new SpanQuery[]{new FieldMaskingSpanQuery(q1, "id"), new FieldMaskingSpanQuery(q2, "id")}, - 1, false);
+			Check(q, new int[]{2, 4});
+		}
+		
+		// Enumerates the spans of an OR over gender:female and masked
+		// first:james, asserting every (doc, start, end) triple in doc order
+		// via the S() formatter.
+        [Test]
+		public virtual void  TestSpans0()
+		{
+			SpanQuery q1 = new SpanTermQuery(new Term("gender", "female"));
+			SpanQuery q2 = new SpanTermQuery(new Term("first", "james"));
+			SpanQuery q = new SpanOrQuery(new SpanQuery[]{q1, new FieldMaskingSpanQuery(q2, "gender")});
+			Check(q, new int[]{0, 1, 2, 3, 4});
+			
+			Spans span = q.GetSpans(searcher.GetIndexReader());
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(0, 0, 1), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(1, 0, 1), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(1, 1, 2), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(2, 0, 1), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(2, 1, 2), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(2, 2, 3), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(3, 0, 1), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(4, 0, 1), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(4, 1, 2), S(span));
+			
+			// Enumeration exhausted after the nine expected spans.
+			Assert.AreEqual(false, span.Next());
+		}
+		
+		// A masked query must produce exactly the same span enumeration as its
+		// unmasked inner query: walk both in lock-step and compare every span.
+        [Test]
+		public virtual void  TestSpans1()
+		{
+			SpanQuery q1 = new SpanTermQuery(new Term("first", "sally"));
+			SpanQuery q2 = new SpanTermQuery(new Term("first", "james"));
+			SpanQuery qA = new SpanOrQuery(new SpanQuery[]{q1, q2});
+			SpanQuery qB = new FieldMaskingSpanQuery(qA, "id");
+			
+			Check(qA, new int[]{0, 1, 2, 4});
+			Check(qB, new int[]{0, 1, 2, 4});
+			
+			Spans spanA = qA.GetSpans(searcher.GetIndexReader());
+			Spans spanB = qB.GetSpans(searcher.GetIndexReader());
+			
+			while (spanA.Next())
+			{
+				Assert.IsTrue(spanB.Next(), "spanB not still going");
+				Assert.AreEqual(S(spanA), S(spanB), "spanA not equal spanB");
+			}
+			// Both enumerations must end together.
+			Assert.IsTrue(!(spanB.Next()), "spanB still going even tough spanA is done");
+		}
+		
+		// Nested masking inside a near query: an OR (with its own inner mask)
+		// and a term query are both masked to "id" and combined; the resulting
+		// spans are asserted triple by triple in doc order.
+        [Test]
+		public virtual void  TestSpans2()
+		{
+			SpanQuery qA1 = new SpanTermQuery(new Term("gender", "female"));
+			SpanQuery qA2 = new SpanTermQuery(new Term("first", "james"));
+			SpanQuery qA = new SpanOrQuery(new SpanQuery[]{qA1, new FieldMaskingSpanQuery(qA2, "gender")});
+			SpanQuery qB = new SpanTermQuery(new Term("last", "jones"));
+			SpanQuery q = new SpanNearQuery(new SpanQuery[]{new FieldMaskingSpanQuery(qA, "id"), new FieldMaskingSpanQuery(qB, "id")}, - 1, false);
+			Check(q, new int[]{0, 1, 2, 3});
+			
+			Spans span = q.GetSpans(searcher.GetIndexReader());
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(0, 0, 1), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(1, 1, 2), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(2, 0, 1), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(2, 2, 3), S(span));
+			
+			Assert.AreEqual(true, span.Next());
+			Assert.AreEqual(S(3, 0, 1), S(span));
+			
+			// Enumeration exhausted after the five expected spans.
+			Assert.AreEqual(false, span.Next());
+		}
+		
+		/// <summary>Formats a span's current position as "s(doc,start,end)".</summary>
+		public virtual System.String S(Spans span)
+		{
+			return S(span.Doc(), span.Start(), span.End());
+		}
+		/// <summary>Formats the triple as "s(doc,start,end)" for assertion messages.</summary>
+		public virtual System.String S(int doc, int start, int end)
+		{
+			return "s(" + doc + "," + start + "," + end + ")";
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestNearSpansOrdered.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Spans/TestNearSpansOrdered.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestNearSpansOrdered.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Spans/TestNearSpansOrdered.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,13 +19,13 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using CheckHits = Lucene.Net.Search.CheckHits;
 using Explanation = Lucene.Net.Search.Explanation;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
@@ -36,23 +36,23 @@
 namespace Lucene.Net.Search.Spans
 {
 	
-	[TestFixture]
-	public class TestNearSpansOrdered : LuceneTestCase
+    [TestFixture]
+	public class TestNearSpansOrdered:LuceneTestCase
 	{
 		protected internal IndexSearcher searcher;
 		
 		public const System.String FIELD = "field";
-		public static readonly Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser(FIELD, new WhitespaceAnalyzer());
+		public static readonly QueryParser qp = new QueryParser(FIELD, new WhitespaceAnalyzer());
 		
 		[TearDown]
-		public override void TearDown()
+		public override void  TearDown()
 		{
 			base.TearDown();
 			searcher.Close();
 		}
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			RAMDirectory directory = new RAMDirectory();
@@ -73,7 +73,6 @@
 		{
 			return new SpanNearQuery(new SpanQuery[]{new SpanTermQuery(new Term(FIELD, s1)), new SpanTermQuery(new Term(FIELD, s2)), new SpanTermQuery(new Term(FIELD, s3))}, slop, inOrder);
 		}
-
 		protected internal virtual SpanNearQuery MakeQuery()
 		{
 			return MakeQuery("w1", "w2", "w3", 1, true);
@@ -83,14 +82,13 @@
 		public virtual void  TestSpanNearQuery()
 		{
 			SpanNearQuery q = MakeQuery();
-			CheckHits.CheckHits_Renamed(q, FIELD, searcher, new int[]{0, 1});
+			CheckHits.CheckHits_Renamed_Method(q, FIELD, searcher, new int[]{0, 1});
 		}
 		
 		public virtual System.String S(Spans span)
 		{
 			return S(span.Doc(), span.Start(), span.End());
 		}
-
 		public virtual System.String S(int doc, int start, int end)
 		{
 			return "s(" + doc + "," + start + "," + end + ")";
@@ -179,10 +177,9 @@
 		public virtual void  TestSpanNearScorerSkipTo1()
 		{
 			SpanNearQuery q = MakeQuery();
-			Weight w = q.CreateWeight_ForNUnitTest(searcher);
-			Scorer s = w.Scorer(searcher.GetIndexReader());
-			Assert.AreEqual(true, s.SkipTo(1));
-			Assert.AreEqual(1, s.Doc());
+			Weight w = q.Weight(searcher);
+			Scorer s = w.Scorer(searcher.GetIndexReader(), true, false);
+			Assert.AreEqual(1, s.Advance(1));
 		}
 		/// <summary> not a direct test of NearSpans, but a demonstration of how/when
 		/// this causes problems
@@ -191,8 +188,8 @@
 		public virtual void  TestSpanNearScorerExplain()
 		{
 			SpanNearQuery q = MakeQuery();
-			Weight w = q.CreateWeight_ForNUnitTest(searcher);
-			Scorer s = w.Scorer(searcher.GetIndexReader());
+			Weight w = q.Weight(searcher);
+			Scorer s = w.Scorer(searcher.GetIndexReader(), true, false);
 			Explanation e = s.Explain(1);
 			Assert.IsTrue(0.0f < e.GetValue(), "Scorer explanation value for doc#1 isn't positive: " + e.ToString());
 		}