You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC

svn commit: r832486 [25/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestTermRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTermRangeFilter.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTermRangeFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTermRangeFilter.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,422 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> A basic 'positive' Unit test class for the TermRangeFilter class.
+	/// 
+	/// <p>
+	/// NOTE: at the moment, this class only tests for 'positive' results,
+	/// it does not verify the results to ensure there are no 'false positives',
+	/// nor does it adequately test 'negative' results.  It also does not test
+	/// that garbage in results in an Exception.
+	/// </summary>
+	[TestFixture]
+	public class TestTermRangeFilter : BaseTestRangeFilter
+	{
+		// Named constructor, mirroring the JUnit-style base class.
+		public TestTermRangeFilter(System.String name) : base(name)
+		{
+		}
+		
+		// Parameterless constructor required by NUnit.
+		public TestTermRangeFilter() : base()
+		{
+		}
+		
+		// Exercises every inclusive/exclusive bound combination of a
+		// TermRangeFilter over the zero-padded "id" field (one doc per id).
+		// Labels follow the pattern "lower,upper,includeLower,includeUpper".
+		[Test]
+		public virtual void  TestRangeFilterId()
+		{
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			int medId = ((maxId - minId) / 2);
+			
+			System.String minIP = Pad(minId);
+			System.String maxIP = Pad(maxId);
+			System.String medIP = Pad(medId);
+			
+			int numDocs = reader.NumDocs();
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			ScoreDoc[] result;
+			// Every document contains "body", so the query alone matches all docs;
+			// the filter is what narrows the result set.
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test id, bounded on both ends
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, maxIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, maxIP, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, maxIP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, maxIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+			
+			result = search.Search(q, new TermRangeFilter("id", medIP, maxIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + maxId - medId, result.Length, "med and up");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, medIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + medId - minId, result.Length, "up to med");
+			
+			// unbounded id
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "min and up");
+			
+			result = search.Search(q, new TermRangeFilter("id", null, maxIP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "max and down");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+			
+			result = search.Search(q, new TermRangeFilter("id", null, maxIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+			
+			result = search.Search(q, new TermRangeFilter("id", medIP, maxIP, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(maxId - medId, result.Length, "med and up, not max");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, medIP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(medId - minId, result.Length, "not min, up to med");
+			
+			// very small sets
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, minIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "min,min,F,F");
+			result = search.Search(q, new TermRangeFilter("id", medIP, medIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "med,med,F,F");
+			result = search.Search(q, new TermRangeFilter("id", maxIP, maxIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "max,max,F,F");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, minIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "min,min,T,T");
+			result = search.Search(q, new TermRangeFilter("id", null, minIP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "nul,min,F,T");
+			
+			result = search.Search(q, new TermRangeFilter("id", maxIP, maxIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,max,T,T");
+			result = search.Search(q, new TermRangeFilter("id", maxIP, null, T, F), numDocs).scoreDocs;
+			// label fixed: the upper bound here is unbounded/exclusive (T,F), not T,T
+			Assert.AreEqual(1, result.Length, "max,nul,T,F");
+			
+			result = search.Search(q, new TermRangeFilter("id", medIP, medIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "med,med,T,T");
+			
+			search.Close();
+		}
+		
+		// Same bound combinations as TestRangeFilterId, but using the
+		// collating TermRangeFilter constructor with an English CompareInfo.
+		[Test]
+		public virtual void  TestRangeFilterIdCollating()
+		{
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			System.Globalization.CompareInfo c = new System.Globalization.CultureInfo("en").CompareInfo;
+			
+			int medId = ((maxId - minId) / 2);
+			
+			System.String minIP = Pad(minId);
+			System.String maxIP = Pad(maxId);
+			System.String medIP = Pad(medId);
+			
+			int numDocs = reader.NumDocs();
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			Hits result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test id, bounded on both ends
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, maxIP, T, T, c));
+			Assert.AreEqual(numDocs, result.Length(), "find all");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, maxIP, T, F, c));
+			Assert.AreEqual(numDocs - 1, result.Length(), "all but last");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, maxIP, F, T, c));
+			Assert.AreEqual(numDocs - 1, result.Length(), "all but first");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, maxIP, F, F, c));
+			Assert.AreEqual(numDocs - 2, result.Length(), "all but ends");
+			
+			result = search.Search(q, new TermRangeFilter("id", medIP, maxIP, T, T, c));
+			Assert.AreEqual(1 + maxId - medId, result.Length(), "med and up");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, medIP, T, T, c));
+			Assert.AreEqual(1 + medId - minId, result.Length(), "up to med");
+			
+			// unbounded id
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, null, T, F, c));
+			Assert.AreEqual(numDocs, result.Length(), "min and up");
+			
+			result = search.Search(q, new TermRangeFilter("id", null, maxIP, F, T, c));
+			Assert.AreEqual(numDocs, result.Length(), "max and down");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, null, F, F, c));
+			Assert.AreEqual(numDocs - 1, result.Length(), "not min, but up");
+			
+			result = search.Search(q, new TermRangeFilter("id", null, maxIP, F, F, c));
+			Assert.AreEqual(numDocs - 1, result.Length(), "not max, but down");
+			
+			result = search.Search(q, new TermRangeFilter("id", medIP, maxIP, T, F, c));
+			Assert.AreEqual(maxId - medId, result.Length(), "med and up, not max");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, medIP, F, T, c));
+			Assert.AreEqual(medId - minId, result.Length(), "not min, up to med");
+			
+			// very small sets
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, minIP, F, F, c));
+			Assert.AreEqual(0, result.Length(), "min,min,F,F");
+			result = search.Search(q, new TermRangeFilter("id", medIP, medIP, F, F, c));
+			Assert.AreEqual(0, result.Length(), "med,med,F,F");
+			result = search.Search(q, new TermRangeFilter("id", maxIP, maxIP, F, F, c));
+			Assert.AreEqual(0, result.Length(), "max,max,F,F");
+			
+			result = search.Search(q, new TermRangeFilter("id", minIP, minIP, T, T, c));
+			Assert.AreEqual(1, result.Length(), "min,min,T,T");
+			result = search.Search(q, new TermRangeFilter("id", null, minIP, F, T, c));
+			Assert.AreEqual(1, result.Length(), "nul,min,F,T");
+			
+			result = search.Search(q, new TermRangeFilter("id", maxIP, maxIP, T, T, c));
+			Assert.AreEqual(1, result.Length(), "max,max,T,T");
+			result = search.Search(q, new TermRangeFilter("id", maxIP, null, T, F, c));
+			// label fixed: the upper bound here is unbounded/exclusive (T,F), not T,T
+			Assert.AreEqual(1, result.Length(), "max,nul,T,F");
+			
+			result = search.Search(q, new TermRangeFilter("id", medIP, medIP, T, T, c));
+			Assert.AreEqual(1, result.Length(), "med,med,T,T");
+			
+			search.Close();
+		}
+		
+		// Bound combinations over the random "rand" field; only the extreme
+		// values of the field are known, so fewer counts can be asserted.
+		[Test]
+		public virtual void  TestRangeFilterRand()
+		{
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			System.String minRP = Pad(signedIndex.minR);
+			System.String maxRP = Pad(signedIndex.maxR);
+			
+			int numDocs = reader.NumDocs();
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			ScoreDoc[] result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test extremes, bounded on both ends
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, maxRP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, maxRP, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but biggest");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, maxRP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but smallest");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, maxRP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 2, result.Length, "all but extremes");
+			
+			// unbounded
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "smallest and up");
+			
+			result = search.Search(q, new TermRangeFilter("rand", null, maxRP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "biggest and down");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not smallest, but up");
+			
+			result = search.Search(q, new TermRangeFilter("rand", null, maxRP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not biggest, but down");
+			
+			// very small sets
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, minRP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "min,min,F,F");
+			result = search.Search(q, new TermRangeFilter("rand", maxRP, maxRP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "max,max,F,F");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, minRP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "min,min,T,T");
+			result = search.Search(q, new TermRangeFilter("rand", null, minRP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "nul,min,F,T");
+			
+			result = search.Search(q, new TermRangeFilter("rand", maxRP, maxRP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,max,T,T");
+			result = search.Search(q, new TermRangeFilter("rand", maxRP, null, T, F), numDocs).scoreDocs;
+			// label fixed: the upper bound here is unbounded/exclusive (T,F), not T,T
+			Assert.AreEqual(1, result.Length, "max,nul,T,F");
+			
+			search.Close();
+		}
+		
+		// Same as TestRangeFilterRand, but through the collating constructor.
+		[Test]
+		public virtual void  TestRangeFilterRandCollating()
+		{
+			// using the unsigned index because collation seems to ignore hyphens
+			IndexReader reader = IndexReader.Open(unsignedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			System.Globalization.CompareInfo c = new System.Globalization.CultureInfo("en").CompareInfo;
+			
+			System.String minRP = Pad(unsignedIndex.minR);
+			System.String maxRP = Pad(unsignedIndex.maxR);
+			
+			int numDocs = reader.NumDocs();
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			Hits result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test extremes, bounded on both ends
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, maxRP, T, T, c));
+			Assert.AreEqual(numDocs, result.Length(), "find all");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, maxRP, T, F, c));
+			Assert.AreEqual(numDocs - 1, result.Length(), "all but biggest");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, maxRP, F, T, c));
+			Assert.AreEqual(numDocs - 1, result.Length(), "all but smallest");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, maxRP, F, F, c));
+			Assert.AreEqual(numDocs - 2, result.Length(), "all but extremes");
+			
+			// unbounded
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, null, T, F, c));
+			Assert.AreEqual(numDocs, result.Length(), "smallest and up");
+			
+			result = search.Search(q, new TermRangeFilter("rand", null, maxRP, F, T, c));
+			Assert.AreEqual(numDocs, result.Length(), "biggest and down");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, null, F, F, c));
+			Assert.AreEqual(numDocs - 1, result.Length(), "not smallest, but up");
+			
+			result = search.Search(q, new TermRangeFilter("rand", null, maxRP, F, F, c));
+			Assert.AreEqual(numDocs - 1, result.Length(), "not biggest, but down");
+			
+			// very small sets
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, minRP, F, F, c));
+			Assert.AreEqual(0, result.Length(), "min,min,F,F");
+			result = search.Search(q, new TermRangeFilter("rand", maxRP, maxRP, F, F, c));
+			Assert.AreEqual(0, result.Length(), "max,max,F,F");
+			
+			result = search.Search(q, new TermRangeFilter("rand", minRP, minRP, T, T, c));
+			Assert.AreEqual(1, result.Length(), "min,min,T,T");
+			result = search.Search(q, new TermRangeFilter("rand", null, minRP, F, T, c));
+			Assert.AreEqual(1, result.Length(), "nul,min,F,T");
+			
+			result = search.Search(q, new TermRangeFilter("rand", maxRP, maxRP, T, T, c));
+			Assert.AreEqual(1, result.Length(), "max,max,T,T");
+			result = search.Search(q, new TermRangeFilter("rand", maxRP, null, T, F, c));
+			// label fixed: the upper bound here is unbounded/exclusive (T,F), not T,T
+			Assert.AreEqual(1, result.Length(), "max,nul,T,F");
+			
+			search.Close();
+		}
+		
+		// Verifies that a collating TermRangeFilter honors Farsi term order.
+		[Test]
+		public virtual void  TestFarsi()
+		{
+			/* build an index */
+			RAMDirectory farsiIndex = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(farsiIndex, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED);
+			Document doc = new Document();
+			doc.Add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			writer.AddDocument(doc);
+			
+			writer.Optimize();
+			writer.Close();
+			
+			IndexReader reader = IndexReader.Open(farsiIndex);
+			IndexSearcher search = new IndexSearcher(reader);
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// The Arabic culture's CompareInfo is used here (mirroring the original
+			// Java test, where a Farsi collator was unavailable); it orders the
+			// Farsi characters in this test as required.
+			System.Globalization.CompareInfo collator = new System.Globalization.CultureInfo("ar").CompareInfo;
+			
+			// Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
+			// orders the U+0698 character before the U+0633 character, so the single
+			// index Term below should NOT be returned by a TermRangeFilter with a Farsi
+			// Collator (or an Arabic one for the case when Farsi is not supported).
+			Hits result = search.Search(q, new TermRangeFilter("content", "\u062F", "\u0698", T, T, collator));
+			Assert.AreEqual(0, result.Length(), "The index Term should not be included.");
+			
+			result = search.Search(q, new TermRangeFilter("content", "\u0633", "\u0638", T, T, collator));
+			Assert.AreEqual(1, result.Length(), "The index Term should be included.");
+			search.Close();
+		}
+		
+		// Verifies that a collating TermRangeFilter honors Danish term order.
+		[Test]
+		public virtual void  TestDanish()
+		{
+			/* build an index */
+			RAMDirectory danishIndex = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(danishIndex, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED);
+			// Danish collation orders the words below in the given order
+			// (example taken from TestSort.testInternationalSort() ).
+			System.String[] words = new System.String[]{"H\u00D8T", "H\u00C5T", "MAND"};
+			for (int docnum = 0; docnum < words.Length; ++docnum)
+			{
+				Document doc = new Document();
+				doc.Add(new Field("content", words[docnum], Field.Store.YES, Field.Index.UN_TOKENIZED));
+				doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.UN_TOKENIZED));
+				writer.AddDocument(doc);
+			}
+			writer.Optimize();
+			writer.Close();
+			
+			IndexReader reader = IndexReader.Open(danishIndex);
+			IndexSearcher search = new IndexSearcher(reader);
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			System.Globalization.CompareInfo collator = new System.Globalization.CultureInfo("da" + "-" + "dk").CompareInfo;
+			// (removed an unused TermRangeQuery local left over from the
+			// corresponding TermRangeQuery test)
+			
+			// Unicode order would not include "H\u00C5T" in [ "H\u00D8T", "MAND" ],
+			// but Danish collation does.
+			Hits result = search.Search(q, new TermRangeFilter("content", "H\u00D8T", "MAND", F, F, collator));
+			Assert.AreEqual(1, result.Length(), "The index Term should be included.");
+			
+			result = search.Search(q, new TermRangeFilter("content", "H\u00C5T", "MAND", F, F, collator));
+			Assert.AreEqual(0, result.Length(), "The index Term should not be included.");
+			search.Close();
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestTermRangeQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTermRangeQuery.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTermRangeQuery.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTermRangeQuery.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,417 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using Tokenizer = Lucene.Net.Analysis.Tokenizer;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+	
+    [TestFixture]
+	public class TestTermRangeQuery:LuceneTestCase
+	{
+		
+		private int docCount = 0;
+		private RAMDirectory dir;
+		
+		// Per-test initialization: create a fresh RAMDirectory for each test.
+		// Was erroneously marked [Test], which would make NUnit run the
+		// fixture initializer as a test case; set-up methods use [SetUp].
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			dir = new RAMDirectory();
+		}
+		
+        [Test]
+		public virtual void  TestExclusive()
+		{
+			// Range (A, C) exclusive on both ends: only "B" can ever match.
+			Query rangeQuery = new TermRangeQuery("content", "A", "C", false, false);
+			
+			System.String[] contents = new System.String[]{"A", "B", "C", "D"};
+			InitializeIndex(contents);
+			IndexSearcher searcher = new IndexSearcher(dir);
+			ScoreDoc[] found = searcher.Search(rangeQuery, null, 1000).scoreDocs;
+			Assert.AreEqual(1, found.Length, "A,B,C,D, only B in range");
+			searcher.Close();
+			
+			// Rebuild without "C": still only "B" matches.
+			contents = new System.String[]{"A", "B", "D"};
+			InitializeIndex(contents);
+			searcher = new IndexSearcher(dir);
+			found = searcher.Search(rangeQuery, null, 1000).scoreDocs;
+			Assert.AreEqual(1, found.Length, "A,B,D, only B in range");
+			searcher.Close();
+			
+			// Adding "C" back must not enlarge the exclusive range.
+			AddDoc("C");
+			searcher = new IndexSearcher(dir);
+			found = searcher.Search(rangeQuery, null, 1000).scoreDocs;
+			Assert.AreEqual(1, found.Length, "C added, still only B in range");
+			searcher.Close();
+		}
+		
+		// Checks the deprecated half-open RangeQuery constructors (a null Term
+		// means that end is unbounded).
+		//TODO: remove in Lucene 3.0
+        [Test]
+		public virtual void  TestDeprecatedCstrctors()
+		{
+			// [unbounded, C) matches A and B.
+			Query query = new RangeQuery(null, new Term("content", "C"), false);
+			InitializeIndex(new System.String[]{"A", "B", "C", "D"});
+			IndexSearcher searcher = new IndexSearcher(dir);
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+			// message fixed: two terms (A and B) fall below the exclusive upper bound
+			Assert.AreEqual(2, hits.Length, "A,B,C,D, A and B in range");
+			searcher.Close();
+			
+			// (C, unbounded] matches only D.
+			query = new RangeQuery(new Term("content", "C"), null, false);
+			InitializeIndex(new System.String[]{"A", "B", "C", "D"});
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			// message fixed: only D lies above the exclusive lower bound
+			Assert.AreEqual(1, hits.Length, "A,B,C,D, only D in range");
+			searcher.Close();
+		}
+		
+        [Test]
+		public virtual void  TestInclusive()
+		{
+			// Range [A, C] inclusive on both ends.
+			Query rangeQuery = new TermRangeQuery("content", "A", "C", true, true);
+			
+			System.String[] contents = new System.String[]{"A", "B", "C", "D"};
+			InitializeIndex(contents);
+			IndexSearcher searcher = new IndexSearcher(dir);
+			ScoreDoc[] found = searcher.Search(rangeQuery, null, 1000).scoreDocs;
+			Assert.AreEqual(3, found.Length, "A,B,C,D - A,B,C in range");
+			searcher.Close();
+			
+			// Rebuild without "C": only the two remaining endpoints match.
+			contents = new System.String[]{"A", "B", "D"};
+			InitializeIndex(contents);
+			searcher = new IndexSearcher(dir);
+			found = searcher.Search(rangeQuery, null, 1000).scoreDocs;
+			Assert.AreEqual(2, found.Length, "A,B,D - A and B in range");
+			searcher.Close();
+			
+			// Adding "C" back restores the third match.
+			AddDoc("C");
+			searcher = new IndexSearcher(dir);
+			found = searcher.Search(rangeQuery, null, 1000).scoreDocs;
+			Assert.AreEqual(3, found.Length, "C added - A, B, C in range");
+			searcher.Close();
+		}
+		
+		// Verifies the Equals/GetHashCode contract of TermRangeQuery: equal
+		// queries share a hash code, and any difference in boost, field, bounds,
+		// inclusiveness, or collator breaks equality.
+        [Test]
+		public virtual void  TestEqualsHashcode()
+		{
+			Query query = new TermRangeQuery("content", "A", "C", true, true);
+			
+			query.SetBoost(1.0f);
+			Query other = new TermRangeQuery("content", "A", "C", true, true);
+			other.SetBoost(1.0f);
+			
+			Assert.AreEqual(query, query, "query equals itself is true");
+			Assert.AreEqual(query, other, "equivalent queries are equal");
+			Assert.AreEqual(query.GetHashCode(), other.GetHashCode(), "hashcode must return same value when equals is true");
+			
+			other.SetBoost(2.0f);
+			Assert.IsFalse(query.Equals(other), "Different boost queries are not equal");
+			
+			other = new TermRangeQuery("notcontent", "A", "C", true, true);
+			Assert.IsFalse(query.Equals(other), "Different fields are not equal");
+			
+			other = new TermRangeQuery("content", "X", "C", true, true);
+			Assert.IsFalse(query.Equals(other), "Different lower terms are not equal");
+			
+			other = new TermRangeQuery("content", "A", "Z", true, true);
+			Assert.IsFalse(query.Equals(other), "Different upper terms are not equal");
+			
+			// null bounds (open-ended ranges) must also satisfy the contract
+			query = new TermRangeQuery("content", null, "C", true, true);
+			other = new TermRangeQuery("content", null, "C", true, true);
+			Assert.AreEqual(query, other, "equivalent queries with null lowerterms are equal()");
+			Assert.AreEqual(query.GetHashCode(), other.GetHashCode(), "hashcode must return same value when equals is true");
+			
+			query = new TermRangeQuery("content", "C", null, true, true);
+			other = new TermRangeQuery("content", "C", null, true, true);
+			Assert.AreEqual(query, other, "equivalent queries with null upperterms are equal()");
+			Assert.AreEqual(query.GetHashCode(), other.GetHashCode(), "hashcode returns same value");
+			
+			// a null lower bound must not be confused with a null upper bound
+			query = new TermRangeQuery("content", null, "C", true, true);
+			other = new TermRangeQuery("content", "C", null, true, true);
+			Assert.IsFalse(query.Equals(other), "queries with different upper and lower terms are not equal");
+			
+			query = new TermRangeQuery("content", "A", "C", false, false);
+			other = new TermRangeQuery("content", "A", "C", true, true);
+			Assert.IsFalse(query.Equals(other), "queries with different inclusive are not equal");
+			
+			query = new TermRangeQuery("content", "A", "C", false, false);
+			other = new TermRangeQuery("content", "A", "C", false, false, System.Globalization.CultureInfo.CurrentCulture.CompareInfo);
+			Assert.IsFalse(query.Equals(other), "a query with a collator is not equal to one without");
+		}
+		
+		// Same scenario as TestExclusive, but through the collating constructor
+		// with an English CompareInfo; results must be identical.
+        [Test]
+		public virtual void  TestExclusiveCollating()
+		{
+			Query query = new TermRangeQuery("content", "A", "C", false, false, new System.Globalization.CultureInfo("en").CompareInfo);
+			InitializeIndex(new System.String[]{"A", "B", "C", "D"});
+			IndexSearcher searcher = new IndexSearcher(dir);
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length, "A,B,C,D, only B in range");
+			searcher.Close();
+			
+			InitializeIndex(new System.String[]{"A", "B", "D"});
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length, "A,B,D, only B in range");
+			searcher.Close();
+			
+			// re-adding "C" must not enlarge the exclusive range
+			AddDoc("C");
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length, "C added, still only B in range");
+			searcher.Close();
+		}
+		
+		// Same scenario as TestInclusive, but through the collating constructor
+		// with an English CompareInfo; results must be identical.
+        [Test]
+		public virtual void  TestInclusiveCollating()
+		{
+			Query query = new TermRangeQuery("content", "A", "C", true, true, new System.Globalization.CultureInfo("en").CompareInfo);
+			
+			InitializeIndex(new System.String[]{"A", "B", "C", "D"});
+			IndexSearcher searcher = new IndexSearcher(dir);
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(3, hits.Length, "A,B,C,D - A,B,C in range");
+			searcher.Close();
+			
+			InitializeIndex(new System.String[]{"A", "B", "D"});
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(2, hits.Length, "A,B,D - A and B in range");
+			searcher.Close();
+			
+			// re-adding "C" restores the third match
+			AddDoc("C");
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(3, hits.Length, "C added - A, B, C in range");
+			searcher.Close();
+		}
+		
+		// Verifies that a collating TermRangeQuery honors Farsi term order
+		// instead of raw Unicode code-point order.
+        [Test]
+		public virtual void  TestFarsi()
+		{
+			// Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
+			// RuleBasedCollator.  However, the Arabic Locale seems to order the Farsi
+			// characters properly.
+			// (comment carried over from the Java original; here the Arabic
+			// culture's CompareInfo is used for the same reason)
+			System.Globalization.CompareInfo collator = new System.Globalization.CultureInfo("ar").CompareInfo;
+			Query query = new TermRangeQuery("content", "\u062F", "\u0698", true, true, collator);
+			// Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
+			// orders the U+0698 character before the U+0633 character, so the single
+			// index Term below should NOT be returned by a TermRangeQuery with a Farsi
+			// Collator (or an Arabic one for the case when Farsi is not supported).
+			InitializeIndex(new System.String[]{"\u0633\u0627\u0628"});
+			IndexSearcher searcher = new IndexSearcher(dir);
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(0, hits.Length, "The index Term should not be included.");
+			
+			// a range that does contain U+0633 under the collator must match
+			query = new TermRangeQuery("content", "\u0633", "\u0638", true, true, collator);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length, "The index Term should be included.");
+			searcher.Close();
+		}
+		
+		// Verifies that a collating TermRangeQuery honors Danish term order:
+		// "H\u00C5T" sorts between "H\u00D8T" and "MAND" under Danish collation,
+		// but not under raw Unicode order.
+        [Test]
+		public virtual void  TestDanish()
+		{
+			System.Globalization.CompareInfo collator = new System.Globalization.CultureInfo("da" + "-" + "dk").CompareInfo;
+			// Danish collation orders the words below in the given order (example taken
+			// from TestSort.testInternationalSort() ).
+			System.String[] words = new System.String[]{"H\u00D8T", "H\u00C5T", "MAND"};
+			Query query = new TermRangeQuery("content", "H\u00D8T", "MAND", false, false, collator);
+			
+			// Unicode order would not include "H\u00C5T" in [ "H\u00D8T", "MAND" ],
+			// but Danish collation does.
+			InitializeIndex(words);
+			IndexSearcher searcher = new IndexSearcher(dir);
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length, "The index Term should be included.");
+			
+			// starting the range at "H\u00C5T" (exclusive) leaves nothing in range
+			query = new TermRangeQuery("content", "H\u00C5T", "MAND", false, false, collator);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(0, hits.Length, "The index Term should not be included.");
+			searcher.Close();
+		}
+		
+		// Analyzer that produces at most one token per field, consisting of the
+		// first character read from the input (used by TestExclusiveLowerNull to
+		// index empty-string terms).
+		private class SingleCharAnalyzer:Analyzer
+		{
+			
+			private class SingleCharTokenizer:Tokenizer
+			{
+				// one-character read buffer
+				internal char[] buffer = new char[1];
+				// true once the single token has been emitted
+				internal bool done;
+				internal TermAttribute termAtt;
+				
+				public SingleCharTokenizer(System.IO.TextReader r):base(r)
+				{
+					termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+				}
+				
+				public override bool IncrementToken()
+				{
+					// NOTE(review): the read happens before the "done" check, so a
+					// second call performs one extra (discarded) read — mirrors the
+					// Java original; confirm intentional before changing.
+					int count = input.Read((System.Char[]) buffer, 0, buffer.Length);
+					if (done)
+						return false;
+					else
+					{
+						done = true;
+						if (count == 1)
+						{
+							termAtt.TermBuffer()[0] = buffer[0];
+							termAtt.SetTermLength(1);
+						}
+						else
+							// empty input yields a zero-length (empty string) token
+							termAtt.SetTermLength(0);
+						return true;
+					}
+				}
+				
+				public override void  Reset(System.IO.TextReader reader)
+				{
+					base.Reset(reader);
+					// re-arm so the tokenizer can emit again after reuse
+					done = false;
+				}
+			}
+			
+			// Reuses a previously created tokenizer for this thread when available.
+			public override TokenStream ReusableTokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				Tokenizer tokenizer = (Tokenizer) GetPreviousTokenStream();
+				if (tokenizer == null)
+				{
+					tokenizer = new SingleCharTokenizer(reader);
+					SetPreviousTokenStream(tokenizer);
+				}
+				else
+					tokenizer.Reset(reader);
+				return tokenizer;
+			}
+			
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				return new SingleCharTokenizer(reader);
+			}
+		}
+		
+		private void  InitializeIndex(System.String[] values)
+		{
+			InitializeIndex(values, new WhitespaceAnalyzer());
+		}
+		
+		private void  InitializeIndex(System.String[] values, Analyzer analyzer)
+		{
+			IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			for (int i = 0; i < values.Length; i++)
+			{
+				InsertDoc(writer, values[i]);
+			}
+			writer.Close();
+		}
+		
+		private void  AddDoc(System.String content)
+		{
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+			InsertDoc(writer, content);
+			writer.Close();
+		}
+		
+		private void  InsertDoc(IndexWriter writer, System.String content)
+		{
+			Document doc = new Document();
+			
+			doc.Add(new Field("id", "id" + docCount, Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("content", content, Field.Store.NO, Field.Index.ANALYZED));
+			
+			writer.AddDocument(doc);
+			docCount++;
+		}
+		
+		// LUCENE-38
+        [Test]
+		public virtual void  TestExclusiveLowerNull()
+		{
+			Analyzer analyzer = new SingleCharAnalyzer();
+			//http://issues.apache.org/jira/browse/LUCENE-38
+			Query query = new TermRangeQuery("content", null, "C", false, false);
+			InitializeIndex(new System.String[]{"A", "B", "", "C", "D"}, analyzer);
+			IndexSearcher searcher = new IndexSearcher(dir);
+			Hits hits = searcher.Search(query);
+			// When Lucene-38 is fixed, use the assert on the next line:
+			Assert.AreEqual(3, hits.Length(), "A,B,<empty string>,C,D => A, B & <empty string> are in range");
+			// until Lucene-38 is fixed, use this assert:
+            //Assert.AreEqual(2, hits.length(),"A,B,<empty string>,C,D => A, B & <empty string> are in range");
+			
+			searcher.Close();
+			InitializeIndex(new System.String[]{"A", "B", "", "D"}, analyzer);
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query);
+			// When Lucene-38 is fixed, use the assert on the next line:
+			Assert.AreEqual(3, hits.Length(), "A,B,<empty string>,D => A, B & <empty string> are in range");
+			// until Lucene-38 is fixed, use this assert:
+            //Assert.AreEqual(2, hits.length(), "A,B,<empty string>,D => A, B & <empty string> are in range");
+			searcher.Close();
+			AddDoc("C");
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query);
+			// When Lucene-38 is fixed, use the assert on the next line:
+			Assert.AreEqual(3, hits.Length(), "C added, still A, B & <empty string> are in range");
+			// until Lucene-38 is fixed, use this assert
+            //Assert.AreEqual(2, hits.length(), "C added, still A, B & <empty string> are in range");
+			searcher.Close();
+		}
+		
+		// LUCENE-38
+        [Test]
+		public virtual void  TestInclusiveLowerNull()
+		{
+			//http://issues.apache.org/jira/browse/LUCENE-38
+			Analyzer analyzer = new SingleCharAnalyzer();
+			Query query = new TermRangeQuery("content", null, "C", true, true);
+			InitializeIndex(new System.String[]{"A", "B", "", "C", "D"}, analyzer);
+			IndexSearcher searcher = new IndexSearcher(dir);
+			Hits hits = searcher.Search(query);
+			// When Lucene-38 is fixed, use the assert on the next line:
+			Assert.AreEqual(4, hits.Length(), "A,B,<empty string>,C,D => A,B,<empty string>,C in range");
+			// until Lucene-38 is fixed, use this assert
+            //Assert.AreEqual(3, hits.length(), "A,B,<empty string>,C,D => A,B,<empty string>,C in range");
+			searcher.Close();
+			InitializeIndex(new System.String[]{"A", "B", "", "D"}, analyzer);
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query);
+			// When Lucene-38 is fixed, use the assert on the next line:
+			Assert.AreEqual(3, hits.Length(), "A,B,<empty string>,D - A, B and <empty string> in range");
+			// until Lucene-38 is fixed, use this assert
+            //Assert.AreEqual(2, hits.length(), "A,B,<empty string>,D => A, B and <empty string> in range");
+			searcher.Close();
+			AddDoc("C");
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(query);
+			// When Lucene-38 is fixed, use the assert on the next line:
+			Assert.AreEqual(4, hits.Length(), "C added => A,B,<empty string>,C in range");
+			// until Lucene-38 is fixed, use this assert
+            //Assert.AreEqual(3, hits.length(), "C added => A,B,<empty string>,C in range");
+			searcher.Close();
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestTermScorer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTermScorer.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTermScorer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTermScorer.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,24 +19,24 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
 	
-	[TestFixture]
-	public class TestTermScorer : LuceneTestCase
+    [TestFixture]
+	public class TestTermScorer:LuceneTestCase
 	{
-		private class AnonymousClassHitCollector : HitCollector
+		private class AnonymousClassCollector:Collector
 		{
-			public AnonymousClassHitCollector(System.Collections.IList docs, TestTermScorer enclosingInstance)
+			public AnonymousClassCollector(System.Collections.IList docs, TestTermScorer enclosingInstance)
 			{
 				InitBlock(docs, enclosingInstance);
 			}
@@ -55,11 +55,28 @@
 				}
 				
 			}
-			public override void  Collect(int doc, float score)
+			private int base_Renamed = 0;
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
 			{
+				this.scorer = scorer;
+			}
+			
+			public override void  Collect(int doc)
+			{
+				float score = scorer.Score();
+				doc = doc + base_Renamed;
 				docs.Add(new TestHit(enclosingInstance, doc, score));
 				Assert.IsTrue(score > 0, "score " + score + " is not greater than 0");
-				Assert.IsTrue(doc == 0 || doc == 5, "Doc: " + doc + " does not equal: " + 0 + " or doc does not equaal: " + 5);
+				Assert.IsTrue(doc == 0 || doc == 5, "Doc: " + doc + " does not equal 0 or doc does not equal 5");
+			}
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				base_Renamed = docBase;
+			}
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
 			}
 		}
 		protected internal RAMDirectory directory;
@@ -70,12 +87,12 @@
 		protected internal IndexReader indexReader;
 		
 		
-		//public TestTermScorer(System.String s) : base(s)
-		//{
-		//}
+		public TestTermScorer(System.String s):base(s)
+		{
+		}
 		
-		[SetUp]
-		public override void SetUp()
+		[Test]
+		public override void  SetUp()
 		{
 			base.SetUp();
 			directory = new RAMDirectory();
@@ -102,14 +119,13 @@
 			
 			Weight weight = termQuery.Weight(indexSearcher);
 			
-			Lucene.Net.Search.TermScorer ts = new Lucene.Net.Search.TermScorer(weight, indexReader.TermDocs(allTerm), indexSearcher.GetSimilarity(), indexReader.Norms(FIELD));
-			Assert.IsTrue(ts != null, "ts is null and it shouldn't be");
+			TermScorer ts = new TermScorer(weight, indexReader.TermDocs(allTerm), indexSearcher.GetSimilarity(), indexReader.Norms(FIELD));
 			//we have 2 documents with the term all in them, one document for all the other values
 			System.Collections.IList docs = new System.Collections.ArrayList();
 			//must call next first
 			
 			
-			ts.Score(new AnonymousClassHitCollector(docs, this));
+			ts.Score(new AnonymousClassCollector(docs, this));
 			Assert.IsTrue(docs.Count == 2, "docs Size: " + docs.Count + " is not: " + 2);
 			TestHit doc0 = (TestHit) docs[0];
 			TestHit doc5 = (TestHit) docs[1];
@@ -145,12 +161,11 @@
 			Weight weight = termQuery.Weight(indexSearcher);
 			
 			TermScorer ts = new TermScorer(weight, indexReader.TermDocs(allTerm), indexSearcher.GetSimilarity(), indexReader.Norms(FIELD));
-			Assert.IsTrue(ts != null, "ts is null and it shouldn't be");
-			Assert.IsTrue(ts.Next() == true, "next did not return a doc");
+			Assert.IsTrue(ts.NextDoc() != DocIdSetIterator.NO_MORE_DOCS, "next did not return a doc");
 			Assert.IsTrue(ts.Score() == 1.6931472f, "score is not correct");
-			Assert.IsTrue(ts.Next() == true, "next did not return a doc");
+			Assert.IsTrue(ts.NextDoc() != DocIdSetIterator.NO_MORE_DOCS, "next did not return a doc");
 			Assert.IsTrue(ts.Score() == 1.6931472f, "score is not correct");
-			Assert.IsTrue(ts.Next() == false, "next returned a doc and it should not have");
+			Assert.IsTrue(ts.NextDoc() == DocIdSetIterator.NO_MORE_DOCS, "next returned a doc and it should not have");
 		}
 		
 		[Test]
@@ -163,10 +178,9 @@
 			Weight weight = termQuery.Weight(indexSearcher);
 			
 			TermScorer ts = new TermScorer(weight, indexReader.TermDocs(allTerm), indexSearcher.GetSimilarity(), indexReader.Norms(FIELD));
-			Assert.IsTrue(ts != null, "ts is null and it shouldn't be");
-			Assert.IsTrue(ts.SkipTo(3) == true, "Didn't skip");
+			Assert.IsTrue(ts.Advance(3) != DocIdSetIterator.NO_MORE_DOCS, "Didn't skip");
 			//The next doc should be doc 5
-			Assert.IsTrue(ts.Doc() == 5, "doc should be number 5");
+			Assert.IsTrue(ts.DocID() == 5, "doc should be number 5");
 		}
 		
 		[Test]
@@ -178,7 +192,6 @@
 			Weight weight = termQuery.Weight(indexSearcher);
 			
 			TermScorer ts = new TermScorer(weight, indexReader.TermDocs(allTerm), indexSearcher.GetSimilarity(), indexReader.Norms(FIELD));
-			Assert.IsTrue(ts != null, "ts is null and it shouldn't be");
 			Explanation explanation = ts.Explain(0);
 			Assert.IsTrue(explanation != null, "explanation is null and it shouldn't be");
 			//System.out.println("Explanation: " + explanation.toString());
@@ -195,7 +208,6 @@
 			weight = termQuery.Weight(indexSearcher);
 			
 			ts = new TermScorer(weight, indexReader.TermDocs(dogsTerm), indexSearcher.GetSimilarity(), indexReader.Norms(FIELD));
-			Assert.IsTrue(ts != null, "ts is null and it shouldn't be");
 			explanation = ts.Explain(1);
 			Assert.IsTrue(explanation != null, "explanation is null and it shouldn't be");
 			//System.out.println("Explanation: " + explanation.toString());

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestTermVectors.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTermVectors.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTermVectors.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTermVectors.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,7 +19,6 @@
 
 using NUnit.Framework;
 
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
@@ -27,17 +26,22 @@
 using Directory = Lucene.Net.Store.Directory;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
-	[TestFixture]
-	public class TestTermVectors : LuceneTestCase
+	
+    [TestFixture]
+	public class TestTermVectors:LuceneTestCase
 	{
 		private IndexSearcher searcher;
 		private Directory directory = new MockRAMDirectory();
+		public TestTermVectors(System.String s):base(s)
+		{
+		}
 		
-		[SetUp]
-		public override void SetUp()
+		[Test]
+		public override void  SetUp()
 		{
 			base.SetUp();
 			IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
@@ -45,7 +49,7 @@
 			//writer.infoStream = System.out;
 			for (int i = 0; i < 1000; i++)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				Field.TermVector termVector;
 				int mod3 = i % 3;
 				int mod2 = i % 2;
@@ -79,7 +83,7 @@
 		}
 		
 		[Test]
-		public virtual void  TestTermVectors_Renamed_Method()
+		public virtual void  TestTermVectors_Renamed()
 		{
 			Query query = new TermQuery(new Term("field", "seventy"));
 			try
@@ -89,52 +93,52 @@
 				
 				for (int i = 0; i < hits.Length; i++)
 				{
-					TermFreqVector[] vector = searcher.Reader.GetTermFreqVectors(hits[i].doc);
+					TermFreqVector[] vector = searcher.reader_ForNUnit.GetTermFreqVectors(hits[i].doc);
 					Assert.IsTrue(vector != null);
 					Assert.IsTrue(vector.Length == 1);
 				}
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				Assert.IsTrue(false);
 			}
 		}
-
-        [Test]
-        public void TestTermVectorsFieldOrder()
-        {
-            Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-            Document doc = new Document();
-            doc.Add(new Field("c", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            doc.Add(new Field("a", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            doc.Add(new Field("b", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            doc.Add(new Field("x", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            writer.AddDocument(doc);
-            writer.Close();
-            IndexReader reader = IndexReader.Open(dir);
-            TermFreqVector[] v = reader.GetTermFreqVectors(0);
-            Assert.AreEqual(4, v.Length);
-            String[] expectedFields = new String[] { "a", "b", "c", "x" };
-            int[] expectedPositions = new int[] { 1, 2, 0 };
-            for (int i = 0; i < v.Length; i++)
-            {
-                TermPositionVector posVec = (TermPositionVector)v[i];
-                Assert.AreEqual(expectedFields[i], posVec.GetField());
-                String[] terms = posVec.GetTerms();
-                Assert.AreEqual(3, terms.Length);
-                Assert.AreEqual("content", terms[0]);
-                Assert.AreEqual("here", terms[1]);
-                Assert.AreEqual("some", terms[2]);
-                for (int j = 0; j < 3; j++)
-                {
-                    int[] positions = posVec.GetTermPositions(j);
-                    Assert.AreEqual(1, positions.Length);
-                    Assert.AreEqual(expectedPositions[j], positions[0]);
-                }
-            }
-        }
-
+		
+		[Test]
+		public virtual void  TestTermVectorsFieldOrder()
+		{
+			Directory dir = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			Document doc = new Document();
+			doc.Add(new Field("c", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("a", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("b", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("x", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			writer.AddDocument(doc);
+			writer.Close();
+			IndexReader reader = IndexReader.Open(dir);
+			TermFreqVector[] v = reader.GetTermFreqVectors(0);
+			Assert.AreEqual(4, v.Length);
+			System.String[] expectedFields = new System.String[]{"a", "b", "c", "x"};
+			int[] expectedPositions = new int[]{1, 2, 0};
+			for (int i = 0; i < v.Length; i++)
+			{
+				TermPositionVector posVec = (TermPositionVector) v[i];
+				Assert.AreEqual(expectedFields[i], posVec.GetField());
+				System.String[] terms = posVec.GetTerms();
+				Assert.AreEqual(3, terms.Length);
+				Assert.AreEqual("content", terms[0]);
+				Assert.AreEqual("here", terms[1]);
+				Assert.AreEqual("some", terms[2]);
+				for (int j = 0; j < 3; j++)
+				{
+					int[] positions = posVec.GetTermPositions(j);
+					Assert.AreEqual(1, positions.Length);
+					Assert.AreEqual(expectedPositions[j], positions[0]);
+				}
+			}
+		}
+		
 		[Test]
 		public virtual void  TestTermPositionVectors()
 		{
@@ -146,14 +150,14 @@
 				
 				for (int i = 0; i < hits.Length; i++)
 				{
-					TermFreqVector[] vector = searcher.Reader.GetTermFreqVectors(hits[i].doc);
+					TermFreqVector[] vector = searcher.reader_ForNUnit.GetTermFreqVectors(hits[i].doc);
 					Assert.IsTrue(vector != null);
 					Assert.IsTrue(vector.Length == 1);
-
-                    bool shouldBePosVector = (hits[i].doc % 2 == 0) ? true : false;
+					
+					bool shouldBePosVector = (hits[i].doc % 2 == 0)?true:false;
 					Assert.IsTrue((shouldBePosVector == false) || (shouldBePosVector == true && (vector[0] is TermPositionVector == true)));
-
-                    bool shouldBeOffVector = (hits[i].doc % 3 == 0) ? true : false;
+					
+					bool shouldBeOffVector = (hits[i].doc % 3 == 0)?true:false;
 					Assert.IsTrue((shouldBeOffVector == false) || (shouldBeOffVector == true && (vector[0] is TermPositionVector == true)));
 					
 					if (shouldBePosVector || shouldBeOffVector)
@@ -191,7 +195,7 @@
 							TermPositionVector posVec = (TermPositionVector) vector[0];
 							Assert.IsTrue(false);
 						}
-						catch (System.InvalidCastException)
+						catch (System.InvalidCastException ignore)
 						{
 							TermFreqVector freqVec = vector[0];
 							System.String[] terms = freqVec.GetTerms();
@@ -200,7 +204,7 @@
 					}
 				}
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				Assert.IsTrue(false);
 			}
@@ -217,14 +221,14 @@
 				
 				for (int i = 0; i < hits.Length; i++)
 				{
-					TermFreqVector[] vector = searcher.Reader.GetTermFreqVectors(hits[i].doc);
+					TermFreqVector[] vector = searcher.reader_ForNUnit.GetTermFreqVectors(hits[i].doc);
 					Assert.IsTrue(vector != null);
 					Assert.IsTrue(vector.Length == 1);
 					
 					//Assert.IsTrue();
 				}
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				Assert.IsTrue(false);
 			}
@@ -250,13 +254,13 @@
 			test4Map["computer"] = 1;
 			test4Map["old"] = 1;
 			
-			Lucene.Net.Documents.Document testDoc1 = new Lucene.Net.Documents.Document();
+			Document testDoc1 = new Document();
 			SetupDoc(testDoc1, test1);
-			Lucene.Net.Documents.Document testDoc2 = new Lucene.Net.Documents.Document();
+			Document testDoc2 = new Document();
 			SetupDoc(testDoc2, test2);
-			Lucene.Net.Documents.Document testDoc3 = new Lucene.Net.Documents.Document();
+			Document testDoc3 = new Document();
 			SetupDoc(testDoc3, test3);
-			Lucene.Net.Documents.Document testDoc4 = new Lucene.Net.Documents.Document();
+			Document testDoc4 = new Document();
 			SetupDoc(testDoc4, test4);
 			
 			Directory dir = new MockRAMDirectory();
@@ -271,8 +275,8 @@
 				writer.AddDocument(testDoc4);
 				writer.Close();
 				IndexSearcher knownSearcher = new IndexSearcher(dir);
-				TermEnum termEnum = knownSearcher.Reader.Terms();
-				TermDocs termDocs = knownSearcher.Reader.TermDocs();
+				TermEnum termEnum = knownSearcher.reader_ForNUnit.Terms();
+				TermDocs termDocs = knownSearcher.reader_ForNUnit.TermDocs();
 				//System.out.println("Terms: " + termEnum.size() + " Orig Len: " + termArray.length);
 				
 				Similarity sim = knownSearcher.GetSimilarity();
@@ -286,7 +290,7 @@
 						int docId = termDocs.Doc();
 						int freq = termDocs.Freq();
 						//System.out.println("Doc Id: " + docId + " freq " + freq);
-						TermFreqVector vector = knownSearcher.Reader.GetTermFreqVector(docId, "field");
+						TermFreqVector vector = knownSearcher.reader_ForNUnit.GetTermFreqVector(docId, "field");
 						float tf = sim.Tf(freq);
 						float idf = sim.Idf(term, knownSearcher);
 						//float qNorm = sim.queryNorm()
@@ -321,7 +325,7 @@
 				Assert.IsTrue(hits[0].doc == 2);
 				Assert.IsTrue(hits[1].doc == 3);
 				Assert.IsTrue(hits[2].doc == 0);
-				TermFreqVector vector2 = knownSearcher.Reader.GetTermFreqVector(hits[1].doc, "field");
+				TermFreqVector vector2 = knownSearcher.reader_ForNUnit.GetTermFreqVector(hits[1].doc, "field");
 				Assert.IsTrue(vector2 != null);
 				//System.out.println("Vector: " + vector);
 				System.String[] terms = vector2.GetTerms();
@@ -345,25 +349,25 @@
 					Assert.IsTrue(freqInt == freq);
 				}
 				SortedTermVectorMapper mapper = new SortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
-                knownSearcher.Reader.GetTermFreqVector(hits[1].doc, mapper);
-				System.Collections.Generic.SortedDictionary<Object,Object> vectorEntrySet = mapper.GetTermVectorEntrySet();
+				knownSearcher.reader_ForNUnit.GetTermFreqVector(hits[1].doc, mapper);
+				System.Collections.Generic.SortedDictionary<object, object> vectorEntrySet = mapper.GetTermVectorEntrySet();
 				Assert.IsTrue(vectorEntrySet.Count == 10, "mapper.getTermVectorEntrySet() Size: " + vectorEntrySet.Count + " is not: " + 10);
 				TermVectorEntry last = null;
-				for (System.Collections.IEnumerator iterator = vectorEntrySet.Keys.GetEnumerator(); iterator.MoveNext(); )
+				for (System.Collections.IEnumerator iterator = vectorEntrySet.GetEnumerator(); iterator.MoveNext(); )
 				{
-					TermVectorEntry tve = (TermVectorEntry)iterator.Current;
+					TermVectorEntry tve = (TermVectorEntry) iterator.Current;
 					if (tve != null && last != null)
 					{
 						Assert.IsTrue(last.GetFrequency() >= tve.GetFrequency(), "terms are not properly sorted");
-						System.Int32 expectedFreq = (System.Int32)test4Map[tve.GetTerm()];
+						System.Int32 expectedFreq = (System.Int32) test4Map[tve.GetTerm()];
 						//we expect double the expectedFreq, since there are two fields with the exact same text and we are collapsing all fields
 						Assert.IsTrue(tve.GetFrequency() == 2 * expectedFreq, "Frequency is not correct:");
 					}
 					last = tve;
 				}
-
+				
 				FieldSortedTermVectorMapper fieldMapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
-                knownSearcher.Reader.GetTermFreqVector(hits[1].doc, fieldMapper);
+				knownSearcher.reader_ForNUnit.GetTermFreqVector(hits[1].doc, fieldMapper);
 				System.Collections.IDictionary map = fieldMapper.GetFieldToTerms();
 				Assert.IsTrue(map.Count == 2, "map Size: " + map.Count + " is not: " + 2);
 				vectorEntrySet = (System.Collections.Generic.SortedDictionary<Object,Object>) map["field"];
@@ -378,10 +382,10 @@
 			}
 		}
 		
-		private void  SetupDoc(Lucene.Net.Documents.Document doc, System.String text)
+		private void  SetupDoc(Document doc, System.String text)
 		{
-            doc.Add(new Field("field2", text, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            doc.Add(new Field("field", text, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
+			doc.Add(new Field("field2", text, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("field", text, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
 			//System.out.println("Document: " + doc);
 		}
 		
@@ -411,16 +415,17 @@
 			Assert.AreEqual(10, hits.Length);
 			for (int i = 0; i < hits.Length; i++)
 			{
-				TermFreqVector[] vector = searcher.Reader.GetTermFreqVectors(hits[i].doc);
+				TermFreqVector[] vector = searcher.reader_ForNUnit.GetTermFreqVectors(hits[i].doc);
 				Assert.IsTrue(vector != null);
 				Assert.IsTrue(vector.Length == 1);
 			}
 		}
 		
+		
 		// In a single doc, for the same field, mix the term
 		// vectors up
 		[Test]
-		public virtual void  TestMixedVectrosVectors() // nice name
+		public virtual void  TestMixedVectrosVectors()
 		{
 			IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document doc = new Document();
@@ -437,8 +442,8 @@
 			Query query = new TermQuery(new Term("field", "one"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
-
-			TermFreqVector[] vector = searcher.Reader.GetTermFreqVectors(hits[0].doc);
+			
+			TermFreqVector[] vector = searcher.reader_ForNUnit.GetTermFreqVectors(hits[0].doc);
 			Assert.IsTrue(vector != null);
 			Assert.IsTrue(vector.Length == 1);
 			TermPositionVector tfv = (TermPositionVector) vector[0];

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestThreadSafe.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestThreadSafe.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestThreadSafe.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestThreadSafe.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,35 +19,36 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Lucene.Net.Documents;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> </summary>
-	/// <version>  $Id: TestThreadSafe.java 598296 2007-11-26 14:52:01Z mikemccand $
+	/// <version>  $Id: TestThreadSafe.java 741311 2009-02-05 21:53:40Z mikemccand $
 	/// </version>
-	[TestFixture]
-	public class TestThreadSafe : LuceneTestCase
+    [TestFixture]
+	public class TestThreadSafe:LuceneTestCase
 	{
-		internal System.Random r = new System.Random();
+		internal System.Random r;
 		internal Directory dir1;
-		//internal Directory dir2;
+		internal Directory dir2;
 		
 		internal IndexReader ir1;
-		//internal IndexReader ir2;
+		internal IndexReader ir2;
 		
 		internal System.String failure = null;
 		
 		
-		internal class Thr : SupportClass.ThreadClass
+		internal class Thr:SupportClass.ThreadClass
 		{
+			[Serializable]
 			private class AnonymousClassFieldSelector : FieldSelector
 			{
 				public AnonymousClassFieldSelector(Thr enclosingInstance)
@@ -98,7 +99,7 @@
 			internal int iter;
 			internal System.Random rand;
 			// pass in random in case we want to make things reproducable
-			public Thr(TestThreadSafe enclosingInstance, int iter, System.Random rand, int level)
+			public Thr(TestThreadSafe enclosingInstance, int iter, System.Random rand)
 			{
 				InitBlock(enclosingInstance);
 				this.iter = iter;
@@ -119,19 +120,19 @@
 						switch (rand.Next(1))
 						{
 							
-							case 0:  LoadDoc(Enclosing_Instance.ir1); break;
+							case 0:  loadDoc(Enclosing_Instance.ir1); break;
 							}
 					}
 				}
 				catch (System.Exception th)
 				{
 					Enclosing_Instance.failure = th.ToString();
-					Assert.Fail(Enclosing_Instance.failure);
+					Assert.Fail(Enclosing_Instance.failure); // TestCase.fail(Enclosing_Instance.failure);
 				}
 			}
 			
 			
-			internal virtual void  LoadDoc(IndexReader ir)
+			internal virtual void  loadDoc(IndexReader ir)
 			{
 				// beware of deleted docs in the future
 				Document doc = ir.Document(rand.Next(ir.MaxDoc()), new AnonymousClassFieldSelector(this));
@@ -187,7 +188,7 @@
 			Thr[] tarr = new Thr[nThreads];
 			for (int i = 0; i < nThreads; i++)
 			{
-				tarr[i] = new Thr(this, iter, new System.Random(), 1);
+				tarr[i] = new Thr(this, iter, new System.Random((System.Int32) r.Next(System.Int32.MaxValue)));
 				tarr[i].Start();
 			}
 			for (int i = 0; i < nThreads; i++)
@@ -196,13 +197,14 @@
 			}
 			if (failure != null)
 			{
-				Assert.Fail(failure);
+                Assert.Fail(failure); // TestCase.fail(failure);
 			}
 		}
 		
 		[Test]
 		public virtual void  TestLazyLoadThreadSafety()
 		{
+			r = NewRandom();
 			dir1 = new RAMDirectory();
 			// test w/ field sizes bigger than the buffer of an index input
 			BuildDir(dir1, 15, 5, 2000);