Posted to commits@lucenenet.apache.org by cc...@apache.org on 2011/11/15 09:41:46 UTC

[Lucene.Net] svn commit: r1202091 [5/6] - in /incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk: src/core/ src/core/Index/ src/core/Search/ src/core/Store/ src/demo/Demo.Common/ test/core/ test/core/Analysis/ test/core/Index/ test/core/QueryParser/ test/core/Sea...

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestFuzzyQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestFuzzyQuery.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestFuzzyQuery.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestFuzzyQuery.cs Tue Nov 15 08:41:44 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using NUnit.Framework;
 
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
@@ -41,138 +41,174 @@ namespace Lucene.Net.Search
     [TestFixture]
 	public class TestFuzzyQuery:LuceneTestCase
 	{
-		
-		[Test]
-		public virtual void  TestFuzziness()
-		{
-			RAMDirectory directory = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			AddDoc("aaaaa", writer);
-			AddDoc("aaaab", writer);
-			AddDoc("aaabb", writer);
-			AddDoc("aabbb", writer);
-			AddDoc("abbbb", writer);
-			AddDoc("bbbbb", writer);
-			AddDoc("ddddd", writer);
-			writer.Optimize();
-			writer.Close();
-			IndexSearcher searcher = new IndexSearcher(directory);
-			
-			FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
-			ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			
-			// same with prefix
-			query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 1);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 2);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 3);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 4);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(2, hits.Length);
-			query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 5);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(1, hits.Length);
-			query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 6);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(1, hits.Length);
-			
-			// not similar enough:
-			query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMinSimilarity, 0);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(0, hits.Length);
-			query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.defaultMinSimilarity, 0); // edit distance to "aaaaa" = 3
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(0, hits.Length);
-			
-			// query identical to a word in the index:
-			query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
-			// default allows for up to two edits:
-			Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
-			Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
-			
-			// query similar to a word in the index:
-			query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 0);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
-			Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
-			Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
-			
-			// now with prefix
-			query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 1);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
-			Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
-			Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
-			query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 2);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
-			Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
-			Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
-			query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 3);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(3, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
-			Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
-			Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
-			query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 4);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(2, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
-			Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
-			query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 5);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(0, hits.Length);
-			
-			
-			query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 0);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(1, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
-			
-			// now with prefix
-			query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 1);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(1, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
-			query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 2);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(1, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
-			query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 3);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(1, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
-			query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 4);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(1, hits.Length);
-			Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
-			query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 5);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(0, hits.Length);
-			
-			
-			// different field = no match:
-			query = new FuzzyQuery(new Term("anotherfield", "ddddX"), FuzzyQuery.defaultMinSimilarity, 0);
-			hits = searcher.Search(query, null, 1000).ScoreDocs;
-			Assert.AreEqual(0, hits.Length);
-			
-			searcher.Close();
-			directory.Close();
-		}
-		
-		[Test]
+
+        [Test]
+        public virtual void TestFuzziness()
+        {
+            RAMDirectory directory = new RAMDirectory();
+            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true,
+                                                 IndexWriter.MaxFieldLength.LIMITED);
+            AddDoc("aaaaa", writer);
+            AddDoc("aaaab", writer);
+            AddDoc("aaabb", writer);
+            AddDoc("aabbb", writer);
+            AddDoc("abbbb", writer);
+            AddDoc("bbbbb", writer);
+            AddDoc("ddddd", writer);
+            writer.Optimize();
+            writer.Close();
+            IndexSearcher searcher = new IndexSearcher(directory, true);
+
+            FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
+            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+
+            // same with prefix
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 6);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+
+            // test scoring
+            query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.defaultMinSimilarity, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length, "3 documents should match");
+            List<String> order = new List<string>(new[] {"bbbbb", "abbbb", "aabbb"});
+            for (int i = 0; i < hits.Length; i++)
+            {
+                String term = searcher.Doc(hits[i].doc).Get("field");
+                //System.out.println(hits[i].score);
+                Assert.AreEqual(order[i], term);
+            }
+
+            // test BooleanQuery.maxClauseCount
+            int savedClauseCount = BooleanQuery.GetMaxClauseCount();
+            try
+            {
+                BooleanQuery.SetMaxClauseCount(2);
+                // This query would normally return 3 documents, because 3 terms match (see above):
+                query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.defaultMinSimilarity, 0);
+                hits = searcher.Search(query, null, 1000).ScoreDocs;
+                Assert.AreEqual(2, hits.Length, "only 2 documents should match");
+                order = new List<string>(new[] {"bbbbb", "abbbb"});
+                for (int i = 0; i < hits.Length; i++)
+                {
+                    String term = searcher.Doc(hits[i].doc).Get("field");
+                    //System.out.println(hits[i].score);
+                    Assert.AreEqual(order[i], term);
+                }
+            }
+            finally
+            {
+                BooleanQuery.SetMaxClauseCount(savedClauseCount);
+            }
+
+            // not similar enough:
+            query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMinSimilarity, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+            query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.defaultMinSimilarity, 0);
+                // edit distance to "aaaaa" = 3
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            // query identical to a word in the index:
+            query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMinSimilarity, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+            // default allows for up to two edits:
+            Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+            Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
+
+            // query similar to a word in the index:
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+            Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+            Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
+
+            // now with prefix
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+            Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+            Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+            Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+            Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(3, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+            Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+            Assert.AreEqual(searcher.Doc(hits[2].doc).Get("field"), ("aaabb"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(2, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("aaaaa"));
+            Assert.AreEqual(searcher.Doc(hits[1].doc).Get("field"), ("aaaab"));
+            query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMinSimilarity, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
+
+            // now with prefix
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 1);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 2);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 3);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 4);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(1, hits.Length);
+            Assert.AreEqual(searcher.Doc(hits[0].doc).Get("field"), ("ddddd"));
+            query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMinSimilarity, 5);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+
+            // different field = no match:
+            query = new FuzzyQuery(new Term("anotherfield", "ddddX"), FuzzyQuery.defaultMinSimilarity, 0);
+            hits = searcher.Search(query, null, 1000).ScoreDocs;
+            Assert.AreEqual(0, hits.Length);
+
+            searcher.Close();
+            directory.Close();
+        }
+
+	    [Test]
 		public virtual void  TestFuzzinessLong()
 		{
 			RAMDirectory directory = new RAMDirectory();
@@ -181,7 +217,7 @@ namespace Lucene.Net.Search
 			AddDoc("segment", writer);
 			writer.Optimize();
 			writer.Close();
-			IndexSearcher searcher = new IndexSearcher(directory);
+	        IndexSearcher searcher = new IndexSearcher(directory, true);
 			
 			FuzzyQuery query;
 			// not similar enough:
@@ -277,7 +313,7 @@ namespace Lucene.Net.Search
 			AddDoc("segment", writer);
 			writer.Optimize();
 			writer.Close();
-			IndexSearcher searcher = new IndexSearcher(directory);
+		    IndexSearcher searcher = new IndexSearcher(directory, true);
 			
 			Query query;
 			// term not over 10 chars, so optimization shortcuts
@@ -305,7 +341,7 @@ namespace Lucene.Net.Search
 		public virtual void  TestGiga()
 		{
 			
-			StandardAnalyzer analyzer = new StandardAnalyzer();
+			StandardAnalyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
 			
 			Directory index = new MockRAMDirectory();
 			IndexWriter w = new IndexWriter(index, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
@@ -330,7 +366,7 @@ namespace Lucene.Net.Search
 			IndexReader r = w.GetReader();
 			w.Close();
 			
-			Query q = new QueryParser("field", analyzer).Parse("giga~0.9");
+			Query q = new QueryParser(Util.Version.LUCENE_CURRENT, "field", analyzer).Parse("giga~0.9");
 			
 			// 3. search
 			IndexSearcher searcher = new IndexSearcher(r);

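For context on the assertions above: the third FuzzyQuery constructor argument is the required common prefix length, and FuzzyQuery.defaultMinSimilarity is the edit-distance threshold. A minimal standalone sketch of the same API, using only calls that appear in this diff (the field name, terms, and class name are illustrative, not taken from the commit):

    using System;
    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using Lucene.Net.Store;

    class FuzzyPrefixSketch
    {
        static void Main()
        {
            RAMDirectory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                                 IndexWriter.MaxFieldLength.LIMITED);
            foreach (string text in new[] { "aaaaa", "aaaab", "bbbbb" })
            {
                Document doc = new Document();
                doc.Add(new Field("field", text, Field.Store.YES, Field.Index.ANALYZED));
                writer.AddDocument(doc);
            }
            writer.Close();

            IndexSearcher searcher = new IndexSearcher(dir, true);
            // Prefix length 2: candidate terms must share their first two characters
            // with "aaaaa", so "bbbbb" is never enumerated regardless of edit distance.
            FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"),
                                              FuzzyQuery.defaultMinSimilarity, 2);
            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
            Console.WriteLine(hits.Length);   // expected: 2 ("aaaaa" and "aaaab")
            searcher.Close();
            dir.Close();
        }
    }
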
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs Tue Nov 15 08:41:44 2011
@@ -125,9 +125,9 @@ namespace Lucene.Net.Search
 			lDoc3.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
 			
 			// creating an index writer for the first index
-			IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			// creating an index writer for the second index, but writing nothing
-			IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			//--------------------------------------------------------------------
 			// scenario 1
@@ -144,7 +144,7 @@ namespace Lucene.Net.Search
 			writerB.Close();
 			
 			// creating the query
-			QueryParser parser = new QueryParser("fulltext", new StandardAnalyzer());
+			QueryParser parser = new QueryParser("fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Query query = parser.Parse("handle:1");
 			
 			// building the searchables
@@ -172,7 +172,7 @@ namespace Lucene.Net.Search
 			//--------------------------------------------------------------------
 			
 			// adding one document to the empty index
-			writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+			writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 			writerB.AddDocument(lDoc);
 			writerB.Optimize();
 			writerB.Close();
@@ -219,7 +219,7 @@ namespace Lucene.Net.Search
 			readerB.Close();
 			
 			// optimizing the index with the writer
-			writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+			writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 			writerB.Optimize();
 			writerB.Close();
 			

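The recurring change in this file, and in several files below, replaces the deprecated parameterless StandardAnalyzer constructor with the overload that takes an explicit Lucene.Net.Util.Version (written as Util.Version in the tests). A minimal sketch of the version-pinned setup, using only constructors that appear in this diff (directory, field, and class names are illustrative):

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Index;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;
    using Lucene.Net.Store;
    using Version = Lucene.Net.Util.Version;

    class VersionPinnedSetup
    {
        static void Main()
        {
            RAMDirectory dir = new RAMDirectory();
            // Analysis behaviour is pinned to an explicit version instead of relying
            // on the removed no-argument StandardAnalyzer() constructor.
            StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
            IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
            // ... AddDocument calls elided ...
            writer.Close();

            // QueryParser takes the same version constant as its first argument.
            QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fulltext", analyzer);
            Query query = parser.Parse("handle:1");
        }
    }
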
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs Tue Nov 15 08:41:44 2011
@@ -109,7 +109,7 @@ namespace Lucene.Net.Search
 			// check result hit ranking
 			if (verbose)
 				System.Console.Out.WriteLine("Query: " + queryStr);
-			QueryParser queryParser = new QueryParser(FIELD_NAME, new StandardAnalyzer());
+			QueryParser queryParser = new QueryParser(FIELD_NAME, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Query query = queryParser.Parse(queryStr);
 			ScoreDoc[] multiSearcherHits = multiSearcher.Search(query, null, 1000).ScoreDocs;
 			ScoreDoc[] singleSearcherHits = singleSearcher.Search(query, null, 1000).ScoreDocs;
@@ -136,11 +136,11 @@ namespace Lucene.Net.Search
 			base.SetUp();
 			// create MultiSearcher from two seperate searchers
 			Directory d1 = new RAMDirectory();
-			IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			AddCollection1(iw1);
 			iw1.Close();
 			Directory d2 = new RAMDirectory();
-			IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			AddCollection2(iw2);
 			iw2.Close();
 			
@@ -151,7 +151,7 @@ namespace Lucene.Net.Search
 			
 			// create IndexSearcher which contains all documents
 			Directory d = new RAMDirectory();
-			IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			AddCollection1(iw);
 			AddCollection2(iw);
 			iw.Close();

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs Tue Nov 15 08:41:44 2011
@@ -71,9 +71,9 @@ namespace Lucene.Net.Search
 				private void  InitBlock(AnonymousClassAnalyzer enclosingInstance)
 				{
 					this.enclosingInstance = enclosingInstance;
-					posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
-					termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
-					offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
+					posIncrAtt =  AddAttribute<PositionIncrementAttribute>();
+					termAtt =  AddAttribute<TermAttribute>();
+					offsetAtt =  AddAttribute<OffsetAttribute>();
 				}
 				private AnonymousClassAnalyzer enclosingInstance;
 				public AnonymousClassAnalyzer Enclosing_Instance
@@ -411,9 +411,9 @@ namespace Lucene.Net.Search
 			this.fieldName = fieldName;
 			pos = 0;
 			i = 0;
-			posIncrAttr = (PositionIncrementAttribute) input.AddAttribute(typeof(PositionIncrementAttribute));
-			payloadAttr = (PayloadAttribute) input.AddAttribute(typeof(PayloadAttribute));
-			termAttr = (TermAttribute) input.AddAttribute(typeof(TermAttribute));
+			posIncrAttr =  input.AddAttribute<PositionIncrementAttribute>();
+			payloadAttr =  input.AddAttribute<PayloadAttribute>();
+			termAttr =  input.AddAttribute<TermAttribute>();
 		}
 		
 		public override bool IncrementToken()

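The attribute changes above swap the reflection-based AddAttribute(typeof(X)) calls, which required a cast, for the generic AddAttribute<X>() overload. A minimal sketch of a TokenFilter acquiring its attributes through the generic API (the filter and its name are illustrative; only the AddAttribute<T>() pattern is taken from this diff):

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;

    // Pass-through filter that only demonstrates typed attribute acquisition.
    public class PassThroughFilter : TokenFilter
    {
        private readonly TermAttribute termAtt;
        private readonly PositionIncrementAttribute posIncrAtt;

        public PassThroughFilter(TokenStream input) : base(input)
        {
            // No cast needed: the generic overload returns the typed attribute directly.
            termAtt = AddAttribute<TermAttribute>();
            posIncrAtt = AddAttribute<PositionIncrementAttribute>();
        }

        public override bool IncrementToken()
        {
            // While this returns true, termAtt and posIncrAtt expose the current
            // token's text and position increment.
            return input.IncrementToken();
        }
    }
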
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositiveScoresOnlyCollector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositiveScoresOnlyCollector.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositiveScoresOnlyCollector.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositiveScoresOnlyCollector.cs Tue Nov 15 08:41:44 2011
@@ -36,22 +36,9 @@ namespace Lucene.Net.Search
 			{
 			}
 			
-			public override Explanation Explain(int doc)
-			{
-				return null;
-			}
-			
 			public override float Score()
 			{
-				return idx == Lucene.Net.Search.TestPositiveScoresOnlyCollector.scores.Length?System.Single.NaN:Lucene.Net.Search.TestPositiveScoresOnlyCollector.scores[idx];
-			}
-			
-			/// <deprecated> use {@link #DocID()} instead. 
-			/// </deprecated>
-            [Obsolete("use DocID() instead.")]
-			public override int Doc()
-			{
-				return idx;
+			    return idx == scores.Length ? float.NaN : scores[idx];
 			}
 			
 			public override int DocID()
@@ -59,31 +46,15 @@ namespace Lucene.Net.Search
 				return idx;
 			}
 			
-			/// <deprecated> use {@link #NextDoc()} instead. 
-			/// </deprecated>
-            [Obsolete("use NextDoc() instead.")]
-			public override bool Next()
-			{
-				return NextDoc() != NO_MORE_DOCS;
-			}
-			
 			public override int NextDoc()
 			{
-				return ++idx != Lucene.Net.Search.TestPositiveScoresOnlyCollector.scores.Length?idx:NO_MORE_DOCS;
-			}
-			
-			/// <deprecated> use {@link #Advance(int)} instead. 
-			/// </deprecated>
-            [Obsolete("use Advance(int) instead.")]
-			public override bool SkipTo(int target)
-			{
-				return Advance(target) != NO_MORE_DOCS;
+			    return ++idx != scores.Length ? idx : NO_MORE_DOCS;
 			}
 			
 			public override int Advance(int target)
 			{
 				idx = target;
-				return idx < Lucene.Net.Search.TestPositiveScoresOnlyCollector.scores.Length?idx:NO_MORE_DOCS;
+			    return idx < scores.Length ? idx : NO_MORE_DOCS;
 			}
 		}
 		
@@ -109,7 +80,7 @@ namespace Lucene.Net.Search
 			}
 			
 			Scorer s = new SimpleScorer();
-			TopDocsCollector tdc = TopScoreDocCollector.create(scores.Length, true);
+			TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(scores.Length, true);
 			Collector c = new PositiveScoresOnlyCollector(tdc);
 			c.SetScorer(s);
 			while (s.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)

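The members deleted above (Doc, Next, SkipTo) are the pre-2.9 iteration API; the surviving DocID()/NextDoc()/Advance() trio is the DocIdSetIterator contract, with NO_MORE_DOCS signalling exhaustion. A minimal consumption loop under that contract, mirroring the loop the test itself uses (the helper class and method names are illustrative):

    using Lucene.Net.Search;

    static class ScorerConsumption
    {
        // Drains any scorer under the 2.9 iterator contract: NextDoc()/Advance()
        // position the iterator, DocID() reports the current document, and
        // NO_MORE_DOCS marks the end.
        public static void Drain(Scorer scorer)
        {
            int doc;
            while ((doc = scorer.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
            {
                float score = scorer.Score();   // only valid while positioned on a document
                System.Console.Out.WriteLine(doc + " scored " + score);
            }
        }
    }
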
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestQueryWrapperFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestQueryWrapperFilter.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestQueryWrapperFilter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestQueryWrapperFilter.cs Tue Nov 15 08:41:44 2011
@@ -42,7 +42,7 @@ namespace Lucene.Net.Search
 		public virtual void  TestBasic()
 		{
 			Directory dir = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document doc = new Document();
 			doc.Add(new Field("field", "value", Field.Store.NO, Field.Index.ANALYZED));
 			writer.AddDocument(doc);

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestScoreCachingWrappingScorer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestScoreCachingWrappingScorer.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestScoreCachingWrappingScorer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestScoreCachingWrappingScorer.cs Tue Nov 15 08:41:44 2011
@@ -38,58 +38,29 @@ namespace Lucene.Net.Search
 			{
 			}
 			
-			public override Explanation Explain(int doc)
-			{
-				return null;
-			}
-			
 			public override float Score()
 			{
 				// advance idx on purpose, so that consecutive calls to score will get
 				// different results. This is to emulate computation of a score. If
 				// ScoreCachingWrappingScorer is used, this should not be called more than
 				// once per document.
-				return idx == Lucene.Net.Search.TestScoreCachingWrappingScorer.scores.Length?System.Single.NaN:Lucene.Net.Search.TestScoreCachingWrappingScorer.scores[idx++];
-			}
-			
-			/// <deprecated> use {@link #DocID()} instead. 
-			/// </deprecated>
-            [Obsolete("use DocID() instead.")]
-			public override int Doc()
-			{
-				return doc;
+			    return idx == scores.Length ? float.NaN : scores[idx++];
 			}
-			
+
 			public override int DocID()
 			{
 				return doc;
 			}
 			
-			/// <deprecated> use {@link #NextDoc()} instead. 
-			/// </deprecated>
-            [Obsolete("use NextDoc() instead.")]
-			public override bool Next()
-			{
-				return NextDoc() != NO_MORE_DOCS;
-			}
-			
 			public override int NextDoc()
 			{
-				return ++doc < Lucene.Net.Search.TestScoreCachingWrappingScorer.scores.Length?doc:NO_MORE_DOCS;
-			}
-			
-			/// <deprecated> use {@link #Advance(int)} instead. 
-			/// </deprecated>
-            [Obsolete("use Advance(int) instead.")]
-			public override bool SkipTo(int target)
-			{
-				return Advance(target) != NO_MORE_DOCS;
+				return ++doc < scores.Length?doc:NO_MORE_DOCS;
 			}
 			
 			public override int Advance(int target)
 			{
 				doc = target;
-				return doc < Lucene.Net.Search.TestScoreCachingWrappingScorer.scores.Length?doc:NO_MORE_DOCS;
+				return doc < scores.Length?doc:NO_MORE_DOCS;
 			}
 		}
 		

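The comment retained in Score() above states the intent: the raw scorer deliberately returns a different value on every call, and ScoreCachingWrappingScorer should keep callers from triggering that more than once per document. A minimal sketch of the wrapping pattern; the public ScoreCachingWrappingScorer(Scorer) constructor is assumed from the Java original rather than shown in this diff, and the helper name is illustrative:

    using Lucene.Net.Search;

    static class ScoreCachingSketch
    {
        // Assumes 'raw' is already positioned on a document. The wrapper caches the
        // score for the current document, so asking twice does not recompute it.
        public static float ScoreTwice(Scorer raw)
        {
            Scorer cached = new ScoreCachingWrappingScorer(raw);
            float first = cached.Score();
            float second = cached.Score();   // same document: served from the cache
            return first == second ? first : float.NaN;
        }
    }
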
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimilarity.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimilarity.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimilarity.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimilarity.cs Tue Nov 15 08:41:44 2011
@@ -31,12 +31,7 @@ using LuceneTestCase = Lucene.Net.Util.L
 namespace Lucene.Net.Search
 {
 	
-	/// <summary>Similarity unit test.
-	/// 
-	/// 
-	/// </summary>
-	/// <version>  $Revision: 787772 $
-	/// </version>
+	/// <summary>Similarity unit test.</summary>
     [TestFixture]
 	public class TestSimilarity:LuceneTestCase
 	{
@@ -66,7 +61,7 @@ namespace Lucene.Net.Search
 			}
 			public override void  Collect(int doc)
 			{
-				Assert.IsTrue(scorer.Score() == 1.0f);
+				Assert.AreEqual(1.0f, scorer.Score());
 			}
 			public override void  SetNextReader(IndexReader reader, int docBase)
 			{
@@ -104,7 +99,7 @@ namespace Lucene.Net.Search
 			public override void  Collect(int doc)
 			{
 				//System.out.println("Doc=" + doc + " score=" + score);
-				Assert.IsTrue(scorer.Score() == (float) doc + base_Renamed + 1);
+				Assert.AreEqual((float) doc + base_Renamed + 1, scorer.Score());
 			}
 			public override void  SetNextReader(IndexReader reader, int docBase)
 			{
@@ -142,7 +137,7 @@ namespace Lucene.Net.Search
 			public override void  Collect(int doc)
 			{
 				//System.out.println("Doc=" + doc + " score=" + score);
-				Assert.IsTrue(scorer.Score() == 1.0f);
+				Assert.AreEqual(1.0f, scorer.Score());
 			}
 			public override void  SetNextReader(IndexReader reader, int docBase)
 			{
@@ -179,7 +174,7 @@ namespace Lucene.Net.Search
 			public override void  Collect(int doc)
 			{
 				//System.out.println("Doc=" + doc + " score=" + score);
-				Assert.IsTrue(scorer.Score() == 2.0f);
+				Assert.AreEqual(2.0f, scorer.Score());
 			}
 			public override void  SetNextReader(IndexReader reader, int docBase)
 			{
@@ -190,8 +185,21 @@ namespace Lucene.Net.Search
 			}
 		}
 		
+        private class AnonymousIDFExplanation : Explanation.IDFExplanation
+        {
+            public override float GetIdf()
+            {
+                return 1.0f;
+            }
+
+            public override string Explain()
+            {
+                return "Inexplicable";
+            }
+        }
+
 		[Serializable]
-		public class SimpleSimilarity:Similarity
+		public class SimpleSimilarity : Similarity
 		{
 			public override float LengthNorm(System.String field, int numTerms)
 			{
@@ -209,10 +217,6 @@ namespace Lucene.Net.Search
 			{
 				return 2.0f;
 			}
-			public override float Idf(System.Collections.Generic.ICollection<Term> terms, Searcher searcher)
-			{
-				return 1.0f;
-			}
 			public override float Idf(int docFreq, int numDocs)
 			{
 				return 1.0f;
@@ -221,6 +225,10 @@ namespace Lucene.Net.Search
 			{
 				return 1.0f;
 			}
+            public override Explanation.IDFExplanation IdfExplain(System.Collections.Generic.ICollection<Term> terms, Searcher searcher)
+            {
+                return new AnonymousIDFExplanation();
+            }
 		}
 		
 		[Test]
@@ -241,7 +249,7 @@ namespace Lucene.Net.Search
 			writer.Optimize();
 			writer.Close();
 			
-			Searcher searcher = new IndexSearcher(store);
+			Searcher searcher = new IndexSearcher(store, true);
 			searcher.SetSimilarity(new SimpleSimilarity());
 			
 			Term a = new Term("field", "a");

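The TestSimilarity changes replace the removed Idf(ICollection<Term>, Searcher) override with IdfExplain, which returns an Explanation.IDFExplanation. Condensing the two hunks above into one place, a custom Similarity that flattens the IDF factor looks roughly like this (method signatures are taken from the diff; deriving from DefaultSimilarity and the class names are illustrative assumptions):

    using System;
    using System.Collections.Generic;
    using Lucene.Net.Index;
    using Lucene.Net.Search;

    [Serializable]
    public class FlatIdfSimilarity : DefaultSimilarity
    {
        private class FlatIdfExplanation : Explanation.IDFExplanation
        {
            public override float GetIdf() { return 1.0f; }
            public override string Explain() { return "flat idf"; }
        }

        public override float Idf(int docFreq, int numDocs)
        {
            return 1.0f;
        }

        // Replaces the removed Idf(ICollection<Term>, Searcher) override.
        public override Explanation.IDFExplanation IdfExplain(ICollection<Term> terms, Searcher searcher)
        {
            return new FlatIdfExplanation();
        }
    }
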
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs Tue Nov 15 08:41:44 2011
@@ -415,8 +415,8 @@ namespace Lucene.Net.Search
 			Document lDoc3 = new Document();
 			lDoc3.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
 			
-			IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			writerA.AddDocument(lDoc);
 			writerA.AddDocument(lDoc2);
@@ -426,7 +426,7 @@ namespace Lucene.Net.Search
 			writerB.AddDocument(lDoc3);
 			writerB.Close();
 			
-			QueryParser parser = new QueryParser("fulltext", new StandardAnalyzer());
+			QueryParser parser = new QueryParser("fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Query query = parser.Parse("handle:1");
 			
 			Searcher[] searchers = new Searcher[2];

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSort.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSort.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSort.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSort.cs Tue Nov 15 08:41:44 2011
@@ -37,14 +37,8 @@ namespace Lucene.Net.Search
 {
 	
 	/// <summary> Unit tests for sorting code.
-	/// 
 	/// <p/>Created: Feb 17, 2004 4:55:10 PM
-	/// 
 	/// </summary>
-	/// <since>   lucene 1.4
-	/// </since>
-	/// <version>  $Id: TestSort.java 803676 2009-08-12 19:31:38Z hossman $
-	/// </version>
 	
 	[Serializable]
     [TestFixture]
@@ -322,7 +316,7 @@ namespace Lucene.Net.Search
 			}
 			//writer.optimize ();
 			writer.Close();
-			IndexSearcher s = new IndexSearcher(indexStore);
+			IndexSearcher s = new IndexSearcher(indexStore, true);
 			s.SetDefaultFieldSortScoring(true, true);
 			return s;
 		}
@@ -355,7 +349,7 @@ namespace Lucene.Net.Search
 			//writer.optimize ();
 			//System.out.println(writer.getSegmentCount());
 			writer.Close();
-			return new IndexSearcher(indexStore);
+			return new IndexSearcher(indexStore, true);
 		}
 		
 		public virtual System.String GetRandomNumberString(int num, int low, int high)
@@ -441,31 +435,31 @@ namespace Lucene.Net.Search
 		[Test]
 		public virtual void  TestTypedSort()
 		{
-            sort.SetSort(new SortField[] { new SortField("int", SortField.INT), SortField.FIELD_DOC });
+		    sort.SetSort(new SortField("int", SortField.INT), SortField.FIELD_DOC);
             AssertMatches(full, queryX, sort, "IGAEC");
             AssertMatches(full, queryY, sort, "DHFJB");
 
-            sort.SetSort(new SortField[] { new SortField("float", SortField.FLOAT), SortField.FIELD_DOC });
+		    sort.SetSort(new SortField("float", SortField.FLOAT), SortField.FIELD_DOC);
             AssertMatches(full, queryX, sort, "GCIEA");
             AssertMatches(full, queryY, sort, "DHJFB");
 
-            sort.SetSort(new SortField[] { new SortField("long", SortField.LONG), SortField.FIELD_DOC });
+		    sort.SetSort(new SortField("long", SortField.LONG), SortField.FIELD_DOC);
             AssertMatches(full, queryX, sort, "EACGI");
             AssertMatches(full, queryY, sort, "FBJHD");
 
-            sort.SetSort(new SortField[] { new SortField("double", SortField.DOUBLE), SortField.FIELD_DOC });
+		    sort.SetSort(new SortField("double", SortField.DOUBLE), SortField.FIELD_DOC);
             AssertMatches(full, queryX, sort, "AGICE");
             AssertMatches(full, queryY, sort, "DJHBF");
 			
-			sort.SetSort(new SortField[]{new SortField("byte", SortField.BYTE), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("byte", SortField.BYTE), SortField.FIELD_DOC);
 			AssertMatches(full, queryX, sort, "CIGAE");
 			AssertMatches(full, queryY, sort, "DHFBJ");
 			
-			sort.SetSort(new SortField[]{new SortField("short", SortField.SHORT), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("short", SortField.SHORT), SortField.FIELD_DOC);
 			AssertMatches(full, queryX, sort, "IAGCE");
 			AssertMatches(full, queryY, sort, "DFHBJ");
 			
-			sort.SetSort(new SortField[]{new SortField("string", SortField.STRING), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("string", SortField.STRING), SortField.FIELD_DOC);
 			AssertMatches(full, queryX, sort, "AIGEC");
 			AssertMatches(full, queryY, sort, "DJHFB");
 		}
@@ -477,7 +471,8 @@ namespace Lucene.Net.Search
 			r = NewRandom();
 			ScoreDoc[] result = null;
 			IndexSearcher searcher = GetFullStrings();
-			sort.SetSort(new SortField[]{new SortField("string", SortField.STRING), new SortField("string2", SortField.STRING, true), SortField.FIELD_DOC});
+		    sort.SetSort(new SortField("string", SortField.STRING), new SortField("string2", SortField.STRING, true),
+		                 SortField.FIELD_DOC);
 			
 			result = searcher.Search(new MatchAllDocsQuery(), null, 500, sort).ScoreDocs;
 			
@@ -549,32 +544,32 @@ namespace Lucene.Net.Search
 			FieldCache fc = Lucene.Net.Search.FieldCache_Fields.DEFAULT;
 			
 			
-			sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassIntParser(this)), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("parser", new AnonymousClassIntParser(this)), SortField.FIELD_DOC);
 			AssertMatches(full, queryA, sort, "JIHGFEDCBA");
             AssertSaneFieldCaches(Lucene.Net.TestCase.GetName() + " IntParser"); 
 			fc.PurgeAllCaches();
 			
-			sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassFloatParser(this)), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("parser", new AnonymousClassFloatParser(this)), SortField.FIELD_DOC);
 			AssertMatches(full, queryA, sort, "JIHGFEDCBA");
             AssertSaneFieldCaches(Lucene.Net.TestCase.GetName() + " FloatParser"); 
 			fc.PurgeAllCaches();
 			
-			sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassLongParser(this)), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("parser", new AnonymousClassLongParser(this)), SortField.FIELD_DOC);
 			AssertMatches(full, queryA, sort, "JIHGFEDCBA");
             AssertSaneFieldCaches(Lucene.Net.TestCase.GetName() + " LongParser"); 
 			fc.PurgeAllCaches();
 			
-			sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassDoubleParser(this)), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("parser", new AnonymousClassDoubleParser(this)), SortField.FIELD_DOC);
 			AssertMatches(full, queryA, sort, "JIHGFEDCBA");
             AssertSaneFieldCaches(Lucene.Net.TestCase.GetName() + " DoubleParser"); 
 			fc.PurgeAllCaches();
 			
-			sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassByteParser(this)), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("parser", new AnonymousClassByteParser(this)), SortField.FIELD_DOC);
 			AssertMatches(full, queryA, sort, "JIHGFEDCBA");
             AssertSaneFieldCaches(Lucene.Net.TestCase.GetName() + " ByteParser"); 
 			fc.PurgeAllCaches();
 			
-			sort.SetSort(new SortField[]{new SortField("parser", new AnonymousClassShortParser(this)), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("parser", new AnonymousClassShortParser(this)), SortField.FIELD_DOC);
 			AssertMatches(full, queryA, sort, "JIHGFEDCBA");
             AssertSaneFieldCaches(Lucene.Net.TestCase.GetName() + " ShortParser"); 
 			fc.PurgeAllCaches();
@@ -592,13 +587,13 @@ namespace Lucene.Net.Search
 			sort.SetSort(SortField.FIELD_DOC);
 			AssertMatches(empty, queryX, sort, "");
 			
-			sort.SetSort(new SortField[]{new SortField("int", SortField.INT), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("int", SortField.INT), SortField.FIELD_DOC);
 			AssertMatches(empty, queryX, sort, "");
 			
-			sort.SetSort(new SortField[]{new SortField("string", SortField.STRING, true), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("string", SortField.STRING, true), SortField.FIELD_DOC);
 			AssertMatches(empty, queryX, sort, "");
 			
-			sort.SetSort(new SortField[]{new SortField("float", SortField.FLOAT), new SortField("string", SortField.STRING)});
+			sort.SetSort(new SortField("float", SortField.FLOAT), new SortField("string", SortField.STRING));
 			AssertMatches(empty, queryX, sort, "");
 		}
 		
@@ -682,32 +677,15 @@ namespace Lucene.Net.Search
 		[Test]
 		public virtual void  TestNewCustomFieldParserSort()
 		{
-			sort.SetSort(new SortField[]{new SortField("parser", new MyFieldComparatorSource())});
+			sort.SetSort(new SortField("parser", new MyFieldComparatorSource()));
 			AssertMatches(full, queryA, sort, "JIHGFEDCBA");
 		}
-		
-		// test sorts where the type of field is determined dynamically
-		[Test]
-		public virtual void  TestAutoSort()
-		{
-			sort.SetSort("int");
-			AssertMatches(full, queryX, sort, "IGAEC");
-			AssertMatches(full, queryY, sort, "DHFJB");
-			
-			sort.SetSort("float");
-			AssertMatches(full, queryX, sort, "GCIEA");
-			AssertMatches(full, queryY, sort, "DHJFB");
-			
-			sort.SetSort("string");
-			AssertMatches(full, queryX, sort, "AIGEC");
-			AssertMatches(full, queryY, sort, "DJHFB");
-		}
-		
+
 		// test sorts in reverse
 		[Test]
 		public virtual void  TestReverseSort()
 		{
-			sort.SetSort(new SortField[]{new SortField(null, SortField.SCORE, true), SortField.FIELD_DOC});
+			sort.SetSort(new SortField(null, SortField.SCORE, true), SortField.FIELD_DOC);
 			AssertMatches(full, queryX, sort, "IEGCA");
 			AssertMatches(full, queryY, sort, "JFHDB");
 			
@@ -715,15 +693,15 @@ namespace Lucene.Net.Search
 			AssertMatches(full, queryX, sort, "IGECA");
 			AssertMatches(full, queryY, sort, "JHFDB");
 			
-			sort.SetSort("int", true);
+			sort.SetSort(new SortField("int", SortField.INT, true));
 			AssertMatches(full, queryX, sort, "CAEGI");
 			AssertMatches(full, queryY, sort, "BJFHD");
 			
-			sort.SetSort("float", true);
+			sort.SetSort(new SortField("float", SortField.FLOAT, true));
 			AssertMatches(full, queryX, sort, "AECIG");
 			AssertMatches(full, queryY, sort, "BFJHD");
 			
-			sort.SetSort("string", true);
+			sort.SetSort(new SortField("string", SortField.STRING, true));
 			AssertMatches(full, queryX, sort, "CEGIA");
 			AssertMatches(full, queryY, sort, "BFHJD");
 		}
@@ -732,10 +710,10 @@ namespace Lucene.Net.Search
 		[Test]
 		public virtual void  TestEmptyFieldSort()
 		{
-			sort.SetSort("string");
+			sort.SetSort(new SortField("string", SortField.STRING));
 			AssertMatches(full, queryF, sort, "ZJI");
 			
-			sort.SetSort("string", true);
+			sort.SetSort(new SortField("string", SortField.STRING, true));
 			AssertMatches(full, queryF, sort, "IJZ");
 			
 			sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en")));
@@ -744,48 +722,51 @@ namespace Lucene.Net.Search
 			sort.SetSort(new SortField("i18n", new System.Globalization.CultureInfo("en"), true));
 			AssertMatches(full, queryF, sort, "IJZ");
 			
-			sort.SetSort("int");
+			sort.SetSort(new SortField("int", SortField.INT));
 			AssertMatches(full, queryF, sort, "IZJ");
 			
-			sort.SetSort("int", true);
+			sort.SetSort(new SortField("int", SortField.INT, true));
 			AssertMatches(full, queryF, sort, "JZI");
 			
-			sort.SetSort("float");
+			sort.SetSort(new SortField("float", SortField.FLOAT));
 			AssertMatches(full, queryF, sort, "ZJI");
 			
 			// using a nonexisting field as first sort key shouldn't make a difference:
-			sort.SetSort(new SortField[]{new SortField("nosuchfield", SortField.STRING), new SortField("float")});
+		    sort.SetSort(new SortField("nosuchfield", SortField.STRING),
+		                 new SortField("float", SortField.FLOAT));
 			AssertMatches(full, queryF, sort, "ZJI");
 			
-			sort.SetSort("float", true);
+			sort.SetSort(new SortField("float", SortField.FLOAT, true));
 			AssertMatches(full, queryF, sort, "IJZ");
 			
 			// When a field is null for both documents, the next SortField should be used.
 			// Works for
-			sort.SetSort(new SortField[]{new SortField("int"), new SortField("string", SortField.STRING), new SortField("float")});
+		    sort.SetSort(new SortField("int", SortField.INT), new SortField("string", SortField.STRING),
+		                 new SortField("float", SortField.FLOAT));
 			AssertMatches(full, queryG, sort, "ZWXY");
 			
 			// Reverse the last criterium to make sure the test didn't pass by chance
-			sort.SetSort(new SortField[]{new SortField("int"), new SortField("string", SortField.STRING), new SortField("float", true)});
+		    sort.SetSort(new SortField("int", SortField.INT), new SortField("string", SortField.STRING),
+		                 new SortField("float", SortField.FLOAT, true));
 			AssertMatches(full, queryG, sort, "ZYXW");
 			
 			// Do the same for a MultiSearcher
 			Searcher multiSearcher = new MultiSearcher(new Searchable[]{full});
-			
-			sort.SetSort(new SortField[]{new SortField("int"), new SortField("string", SortField.STRING), new SortField("float")});
+
+            sort.SetSort(new SortField("int", SortField.INT), new SortField("string", SortField.STRING), new SortField("float", SortField.FLOAT));
 			AssertMatches(multiSearcher, queryG, sort, "ZWXY");
-			
-			sort.SetSort(new SortField[]{new SortField("int"), new SortField("string", SortField.STRING), new SortField("float", true)});
+
+            sort.SetSort(new SortField("int", SortField.INT), new SortField("string", SortField.STRING), new SortField("float", SortField.FLOAT, true));
 			AssertMatches(multiSearcher, queryG, sort, "ZYXW");
 			// Don't close the multiSearcher. it would close the full searcher too!
 			
 			// Do the same for a ParallelMultiSearcher
 			Searcher parallelSearcher = new ParallelMultiSearcher(new Searchable[]{full});
-			
-			sort.SetSort(new SortField[]{new SortField("int"), new SortField("string", SortField.STRING), new SortField("float")});
+
+            sort.SetSort(new SortField("int", SortField.INT), new SortField("string", SortField.STRING), new SortField("float", SortField.FLOAT));
 			AssertMatches(parallelSearcher, queryG, sort, "ZWXY");
-			
-			sort.SetSort(new SortField[]{new SortField("int"), new SortField("string", SortField.STRING), new SortField("float", true)});
+
+            sort.SetSort(new SortField("int", SortField.INT), new SortField("string", SortField.STRING), new SortField("float", SortField.FLOAT, true));
 			AssertMatches(parallelSearcher, queryG, sort, "ZYXW");
 			// Don't close the parallelSearcher. it would close the full searcher too!
 		}
@@ -794,13 +775,13 @@ namespace Lucene.Net.Search
 		[Test]
 		public virtual void  TestSortCombos()
 		{
-			sort.SetSort(new System.String[]{"int", "float"});
+			sort.SetSort(new SortField("int", SortField.INT), new SortField("float", SortField.FLOAT));
 			AssertMatches(full, queryX, sort, "IGEAC");
 			
-			sort.SetSort(new SortField[]{new SortField("int", true), new SortField(null, SortField.DOC, true)});
+			sort.SetSort(new SortField[]{new SortField("int", SortField.INT, true), new SortField(null, SortField.DOC, true)});
 			AssertMatches(full, queryX, sort, "CEAGI");
 			
-			sort.SetSort(new System.String[]{"float", "string"});
+			sort.SetSort(new SortField("float", SortField.FLOAT), new SortField("string", SortField.STRING));
 			AssertMatches(full, queryX, sort, "GICEA");
 		}
 		
@@ -808,11 +789,11 @@ namespace Lucene.Net.Search
 		[Test]
 		public virtual void  TestLocaleSort()
 		{
-			sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"))});
+			sort.SetSort(new SortField("string", new System.Globalization.CultureInfo("en-US")));
 			AssertMatches(full, queryX, sort, "AIGEC");
 			AssertMatches(full, queryY, sort, "DJHFB");
 			
-			sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"), true)});
+			sort.SetSort(new SortField("string", new System.Globalization.CultureInfo("en-US"), true));
 			AssertMatches(full, queryX, sort, "CEGIA");
 			AssertMatches(full, queryY, sort, "BFHJD");
 		}
@@ -855,21 +836,6 @@ namespace Lucene.Net.Search
 			AssertMatches(multiSearcher, queryY, sort, "BJDHF");
 		}
 		
-		// test a custom sort function
-		[Test]
-		public virtual void  TestCustomSorts()
-		{
-			sort.SetSort(new SortField("custom", SampleComparable.GetComparatorSource()));
-			AssertMatches(full, queryX, sort, "CAIEG");
-			sort.SetSort(new SortField("custom", SampleComparable.GetComparatorSource(), true));
-			AssertMatches(full, queryY, sort, "HJDBF");
-			SortComparator custom = SampleComparable.GetComparator();
-			sort.SetSort(new SortField("custom", custom));
-			AssertMatches(full, queryX, sort, "CAIEG");
-			sort.SetSort(new SortField("custom", custom, true));
-			AssertMatches(full, queryY, sort, "HJDBF");
-		}
-		
 		// test a variety of sorts using more than one searcher
 		[Test]
 		public virtual void  TestMultiSort()
@@ -919,7 +885,7 @@ namespace Lucene.Net.Search
 			AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).ScoreDocs, multi));
 			
-			sort.SetSort("int");
+			sort.SetSort(new SortField("int", SortField.INT));
 			AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).ScoreDocs, multi));
 			AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).ScoreDocs, full));
@@ -927,7 +893,7 @@ namespace Lucene.Net.Search
 			AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).ScoreDocs, multi));
 			
-			sort.SetSort("float");
+			sort.SetSort(new SortField("float", SortField.FLOAT));
 			AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).ScoreDocs, multi));
 			AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).ScoreDocs, full));
@@ -935,7 +901,7 @@ namespace Lucene.Net.Search
 			AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).ScoreDocs, multi));
 			
-			sort.SetSort("string");
+			sort.SetSort(new SortField("string", SortField.STRING));
 			AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).ScoreDocs, multi));
 			AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).ScoreDocs, full));
@@ -943,7 +909,7 @@ namespace Lucene.Net.Search
 			AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).ScoreDocs, multi));
 			
-			sort.SetSort(new System.String[]{"int", "float"});
+			sort.SetSort(new SortField("int", SortField.INT), new SortField("float", SortField.FLOAT));
 			AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).ScoreDocs, multi));
 			AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).ScoreDocs, full));
@@ -951,7 +917,7 @@ namespace Lucene.Net.Search
 			AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).ScoreDocs, multi));
 			
-			sort.SetSort(new SortField[]{new SortField("int", true), new SortField(null, SortField.DOC, true)});
+			sort.SetSort(new SortField("int", SortField.INT, true), new SortField(null, SortField.DOC, true));
 			AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).ScoreDocs, multi));
 			AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).ScoreDocs, full));
@@ -959,7 +925,7 @@ namespace Lucene.Net.Search
 			AssertSameValues(scoresA, GetScores(full.Search(queryA, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresA, GetScores(multi.Search(queryA, null, 1000, sort).ScoreDocs, multi));
 			
-			sort.SetSort(new System.String[]{"float", "string"});
+			sort.SetSort(new SortField("float", SortField.FLOAT), new SortField("string", SortField.STRING));
 			AssertSameValues(scoresX, GetScores(full.Search(queryX, null, 1000, sort).ScoreDocs, full));
 			AssertSameValues(scoresX, GetScores(multi.Search(queryX, null, 1000, sort).ScoreDocs, multi));
 			AssertSameValues(scoresY, GetScores(full.Search(queryY, null, 1000, sort).ScoreDocs, full));
@@ -1002,7 +968,7 @@ namespace Lucene.Net.Search
 			for (int i = 0; i < sort.Length; i++)
 			{
 				Query q = new MatchAllDocsQuery();
-				TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, false, false, false, true);
+                TopFieldCollector tdc = TopFieldCollector.create(sort[i], 10, false, false, false, true);
 				
 				full.Search(q, tdc);
 				
@@ -1023,7 +989,7 @@ namespace Lucene.Net.Search
 			for (int i = 0; i < sort.Length; i++)
 			{
 				Query q = new MatchAllDocsQuery();
-				TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, true, false, false, true);
+                TopFieldCollector tdc = TopFieldCollector.create(sort[i], 10, true, false, false, true);
 				
 				full.Search(q, tdc);
 				
@@ -1046,7 +1012,7 @@ namespace Lucene.Net.Search
 			for (int i = 0; i < sort.Length; i++)
 			{
 				Query q = new MatchAllDocsQuery();
-				TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, true, true, false, true);
+                TopDocsCollector<FieldValueHitQueue.Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true, false, true);
 				
 				full.Search(q, tdc);
 				
@@ -1069,7 +1035,7 @@ namespace Lucene.Net.Search
 			for (int i = 0; i < sort.Length; i++)
 			{
 				Query q = new MatchAllDocsQuery();
-				TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
+                TopFieldCollector tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
 				
 				full.Search(q, tdc);
 				
@@ -1090,12 +1056,17 @@ namespace Lucene.Net.Search
 			// Two Sort criteria to instantiate the multi/single comparators.
 			Sort[] sort = new Sort[]{new Sort(SortField.FIELD_DOC), new Sort()};
 			bool[][] tfcOptions = new bool[][]{new bool[]{false, false, false}, new bool[]{false, false, true}, new bool[]{false, true, false}, new bool[]{false, true, true}, new bool[]{true, false, false}, new bool[]{true, false, true}, new bool[]{true, true, false}, new bool[]{true, true, true}};
-			System.String[] actualTFCClasses = new System.String[]{"OutOfOrderOneComparatorNonScoringCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorScoringNoMaxScoreCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorNonScoringCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorScoringNoMaxScoreCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector"};
-			
-			// Save the original value to set later.
-			bool origVal = BooleanQuery.GetAllowDocsOutOfOrder();
-			
-			BooleanQuery.SetAllowDocsOutOfOrder(true);
+		    System.String[] actualTFCClasses = new System.String[]
+		                                           {
+		                                               "OutOfOrderOneComparatorNonScoringCollector",
+		                                               "OutOfOrderOneComparatorScoringMaxScoreCollector",
+		                                               "OutOfOrderOneComparatorScoringNoMaxScoreCollector",
+		                                               "OutOfOrderOneComparatorScoringMaxScoreCollector",
+		                                               "OutOfOrderOneComparatorNonScoringCollector",
+		                                               "OutOfOrderOneComparatorScoringMaxScoreCollector",
+		                                               "OutOfOrderOneComparatorScoringNoMaxScoreCollector",
+		                                               "OutOfOrderOneComparatorScoringMaxScoreCollector"
+		                                           };
 			
 			BooleanQuery bq = new BooleanQuery();
 			// Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
@@ -1104,31 +1075,23 @@ namespace Lucene.Net.Search
 			// Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
 			// the clause instead of BQ.
 			bq.SetMinimumNumberShouldMatch(1);
-			try
-			{
-				for (int i = 0; i < sort.Length; i++)
-				{
-					for (int j = 0; j < tfcOptions.Length; j++)
-					{
-						TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false);
-						
-						Assert.IsTrue(tdc.GetType().FullName.EndsWith("+" + actualTFCClasses[j]));
-						
-						full.Search(bq, tdc);
-						
-						TopDocs td = tdc.TopDocs();
-						ScoreDoc[] sd = td.ScoreDocs;
-						Assert.AreEqual(10, sd.Length);
-					}
-				}
-			}
-			finally
-			{
-				// Whatever happens, reset BooleanQuery.allowDocsOutOfOrder to the
-				// original value. Don't set it to false in case the implementation in BQ
-				// will change some day.
-				BooleanQuery.SetAllowDocsOutOfOrder(origVal);
-			}
+
+            for (int i = 0; i < sort.Length; i++)
+            {
+                for (int j = 0; j < tfcOptions.Length; j++)
+                {
+                    TopFieldCollector tdc = TopFieldCollector.create(sort[i], 10, tfcOptions[j][0], tfcOptions[j][1],
+                                                                     tfcOptions[j][2], false);
+
+                    Assert.IsTrue(tdc.GetType().FullName.EndsWith("+" + actualTFCClasses[j]));
+
+                    full.Search(bq, tdc);
+
+                    TopDocs td = tdc.TopDocs();
+                    ScoreDoc[] sd = td.ScoreDocs;
+                    Assert.AreEqual(10, sd.Length);
+                }
+            }
 		}
 		
 		[Test]
@@ -1139,7 +1102,7 @@ namespace Lucene.Net.Search
 			Sort[] sort = new Sort[]{new Sort(SortField.FIELD_DOC), new Sort()};
 			for (int i = 0; i < sort.Length; i++)
 			{
-				TopDocsCollector tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
+                TopFieldCollector tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
 				TopDocs td = tdc.TopDocs();
 				Assert.AreEqual(0, td.TotalHits);
 				Assert.IsTrue(System.Single.IsNaN(td.GetMaxScore()));
@@ -1157,52 +1120,52 @@ namespace Lucene.Net.Search
 			expected = isFull?"IDHFGJABEC":"IDHFGJAEBC";
 			AssertMatches(multi, queryA, sort, expected);
 			
-			sort.SetSort(new SortField[]{new SortField("int", SortField.INT), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("int", SortField.INT), SortField.FIELD_DOC);
 			expected = isFull?"IDHFGJABEC":"IDHFGJAEBC";
 			AssertMatches(multi, queryA, sort, expected);
 			
-			sort.SetSort("int");
+			sort.SetSort(new SortField("int", SortField.INT));
 			expected = isFull?"IDHFGJABEC":"IDHFGJAEBC";
 			AssertMatches(multi, queryA, sort, expected);
 			
-			sort.SetSort(new SortField[]{new SortField("float", SortField.FLOAT), SortField.FIELD_DOC});
+			sort.SetSort(new SortField("float", SortField.FLOAT), SortField.FIELD_DOC);
 			AssertMatches(multi, queryA, sort, "GDHJCIEFAB");
 			
-			sort.SetSort("float");
+			sort.SetSort(new SortField("float", SortField.FLOAT));
 			AssertMatches(multi, queryA, sort, "GDHJCIEFAB");
 			
-			sort.SetSort("string");
+			sort.SetSort(new SortField("string", SortField.STRING));
 			AssertMatches(multi, queryA, sort, "DJAIHGFEBC");
 			
-			sort.SetSort("int", true);
+			sort.SetSort(new SortField("int", SortField.INT, true));
 			expected = isFull?"CABEJGFHDI":"CAEBJGFHDI";
 			AssertMatches(multi, queryA, sort, expected);
 			
-			sort.SetSort("float", true);
+			sort.SetSort(new SortField("float", SortField.FLOAT, true));
 			AssertMatches(multi, queryA, sort, "BAFECIJHDG");
 			
-			sort.SetSort("string", true);
+			sort.SetSort(new SortField("string", SortField.STRING, true));
 			AssertMatches(multi, queryA, sort, "CBEFGHIAJD");
 			
-			sort.SetSort(new System.String[]{"int", "float"});
+			sort.SetSort(new SortField("int", SortField.INT), new SortField("float", SortField.FLOAT));
 			AssertMatches(multi, queryA, sort, "IDHFGJEABC");
 			
-			sort.SetSort(new System.String[]{"float", "string"});
+			sort.SetSort(new SortField("float", SortField.FLOAT), new SortField("string", SortField.STRING));
 			AssertMatches(multi, queryA, sort, "GDHJICEFAB");
 			
-			sort.SetSort("int");
+			sort.SetSort(new SortField("int", SortField.INT));
 			AssertMatches(multi, queryF, sort, "IZJ");
 			
-			sort.SetSort("int", true);
+			sort.SetSort(new SortField("int", SortField.INT, true));
 			AssertMatches(multi, queryF, sort, "JZI");
 			
-			sort.SetSort("float");
+			sort.SetSort(new SortField("float", SortField.FLOAT));
 			AssertMatches(multi, queryF, sort, "ZJI");
 			
-			sort.SetSort("string");
+			sort.SetSort(new SortField("string", SortField.STRING));
 			AssertMatches(multi, queryF, sort, "ZJI");
 			
-			sort.SetSort("string", true);
+			sort.SetSort(new SortField("string", SortField.STRING, true));
 			AssertMatches(multi, queryF, sort, "IJZ");
 			
 			// up to this point, all of the searches should have "sane" 
@@ -1211,13 +1174,13 @@ namespace Lucene.Net.Search
 			// next we'll check Locale based (String[]) for 'string', so purge first
 			Lucene.Net.Search.FieldCache_Fields.DEFAULT.PurgeAllCaches();
 			
-			sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"))});
+			sort.SetSort(new SortField("string", new System.Globalization.CultureInfo("en-US")));
 			AssertMatches(multi, queryA, sort, "DJAIHGFEBC");
 			
-			sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-US"), true)});
+			sort.SetSort(new SortField("string", new System.Globalization.CultureInfo("en-US"), true));
 			AssertMatches(multi, queryA, sort, "CBEFGHIAJD");
 			
-			sort.SetSort(new SortField[]{new SortField("string", new System.Globalization.CultureInfo("en-GB"))});
+			sort.SetSort(new SortField("string", new System.Globalization.CultureInfo("en-GB")));
 			AssertMatches(multi, queryA, sort, "DJAIHGFEBC");
 
             AssertSaneFieldCaches(Lucene.Net.TestCase.GetName() + " Locale.US + Locale.UK"); 
@@ -1296,7 +1259,7 @@ namespace Lucene.Net.Search
             }
             writer.Optimize(); // enforce one segment to have a higher unique term count in all cases
             writer.Close();
-            sort.SetSort(new SortField[]{new SortField("string", SortField.STRING),SortField.FIELD_DOC });
+            sort.SetSort(new SortField("string", SortField.STRING), SortField.FIELD_DOC);
             // this should not throw AIOOBE or RuntimeEx
             new IndexSearcher(indexStore, true).Search(new MatchAllDocsQuery(), null, 500, sort);
         }
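
The TestSort.cs hunks above move every string-based Sort.SetSort overload onto explicit SortField instances. A minimal sketch of the new-style call, assuming an index with an int-valued "int" field (the field name and query are illustrative, not part of the test):

    using Lucene.Net.Search;

    public static class SortFieldSketch
    {
        // Returns the top hits ordered by "int" descending, ties broken by doc id,
        // using the explicit SortField constructors that replace SetSort("int", true).
        public static ScoreDoc[] TopByIntThenDoc(Searcher searcher, Query query)
        {
            Sort sort = new Sort();
            sort.SetSort(new SortField("int", SortField.INT, true), SortField.FIELD_DOC);
            return searcher.Search(query, null, 1000, sort).ScoreDocs;
        }
    }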

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTermRangeQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTermRangeQuery.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTermRangeQuery.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTermRangeQuery.cs Tue Nov 15 08:41:44 2011
@@ -266,7 +266,7 @@ namespace Lucene.Net.Search
 				
 				public SingleCharTokenizer(System.IO.TextReader r):base(r)
 				{
-					termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+					termAtt = AddAttribute<TermAttribute>();
 				}
 				
 				public override bool IncrementToken()
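
The cast-free AddAttribute call above uses the generic attribute API. A minimal self-contained tokenizer built the same way (the class and its one-character tokenization are illustrative, not part of the test):

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;

    internal sealed class OneCharTokenizer : Tokenizer
    {
        private readonly System.IO.TextReader reader;
        private readonly TermAttribute termAtt;

        public OneCharTokenizer(System.IO.TextReader reader) : base(reader)
        {
            this.reader = reader;
            // Generic form replaces (TermAttribute) AddAttribute(typeof(TermAttribute)).
            termAtt = AddAttribute<TermAttribute>();
        }

        public override bool IncrementToken()
        {
            ClearAttributes();
            int c = reader.Read();
            if (c == -1)
                return false;
            // Emit each character as its own single-character term.
            termAtt.SetTermBuffer(((char) c).ToString());
            return true;
        }
    }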

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTopDocsCollector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTopDocsCollector.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTopDocsCollector.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTopDocsCollector.cs Tue Nov 15 08:41:44 2011
@@ -137,7 +137,7 @@ namespace Lucene.Net.Search
 		public virtual void  TestInvalidArguments()
 		{
 			int numResults = 5;
-			TopDocsCollector tdc = doSearch(numResults);
+			TopDocsCollector<ScoreDoc> tdc = doSearch(numResults);
 			
 			// start < 0
 			Assert.AreEqual(0, tdc.TopDocs(- 1).ScoreDocs.Length);
@@ -158,21 +158,21 @@ namespace Lucene.Net.Search
         [Test]
 		public virtual void  TestZeroResults()
 		{
-			TopDocsCollector tdc = new MyTopsDocCollector(5);
+			TopDocsCollector<ScoreDoc> tdc = new MyTopsDocCollector(5);
 			Assert.AreEqual(0, tdc.TopDocs(0, 1).ScoreDocs.Length);
 		}
 		
         [Test]
 		public virtual void  TestFirstResultsPage()
 		{
-			TopDocsCollector tdc = doSearch(15);
+			TopDocsCollector<ScoreDoc> tdc = doSearch(15);
 			Assert.AreEqual(10, tdc.TopDocs(0, 10).ScoreDocs.Length);
 		}
 		
         [Test]
 		public virtual void  TestSecondResultsPages()
 		{
-			TopDocsCollector tdc = doSearch(15);
+			TopDocsCollector<ScoreDoc> tdc = doSearch(15);
 			// ask for more results than are available
 			Assert.AreEqual(5, tdc.TopDocs(10, 10).ScoreDocs.Length);
 			
@@ -188,14 +188,14 @@ namespace Lucene.Net.Search
         [Test]
 		public virtual void  TestGetAllResults()
 		{
-			TopDocsCollector tdc = doSearch(15);
+			TopDocsCollector<ScoreDoc> tdc = doSearch(15);
 			Assert.AreEqual(15, tdc.TopDocs().ScoreDocs.Length);
 		}
 		
         [Test]
 		public virtual void  TestGetResultsFromStart()
 		{
-			TopDocsCollector tdc = doSearch(15);
+			TopDocsCollector<ScoreDoc> tdc = doSearch(15);
 			// should bring all results
 			Assert.AreEqual(15, tdc.TopDocs(0).ScoreDocs.Length);
 			
@@ -208,7 +208,7 @@ namespace Lucene.Net.Search
 		public virtual void  TestMaxScore()
 		{
 			// ask for all results
-			TopDocsCollector tdc = doSearch(15);
+			TopDocsCollector<ScoreDoc> tdc = doSearch(15);
 			TopDocs td = tdc.TopDocs();
 			Assert.AreEqual(MAX_SCORE, td.GetMaxScore(), 0f);
 			
@@ -223,7 +223,7 @@ namespace Lucene.Net.Search
         [Test]
 		public virtual void  TestResultsOrder()
 		{
-			TopDocsCollector tdc = doSearch(15);
+			TopDocsCollector<ScoreDoc> tdc = doSearch(15);
 			ScoreDoc[] sd = tdc.TopDocs().ScoreDocs;
 			
 			Assert.AreEqual(MAX_SCORE, sd[0].score, 0f);
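
Each collector declaration above gains a type argument because TopDocsCollector is now generic over its hit type. A minimal sketch that collects sorted hits through the same TopFieldCollector.create factory used in TestSort.cs (the query, sort, and flag values are illustrative):

    using Lucene.Net.Search;

    public static class CollectorSketch
    {
        public static TopDocs CollectInDocOrder(Searcher searcher, Query query, int numResults)
        {
            Sort sort = new Sort(SortField.FIELD_DOC);
            // TopFieldCollector extends TopDocsCollector<FieldValueHitQueue.Entry>,
            // so the variable carries the generic hit type instead of the old raw type.
            TopDocsCollector<FieldValueHitQueue.Entry> tdc =
                TopFieldCollector.create(sort, numResults, true, false, false, true);
            searcher.Search(query, tdc);
            return tdc.TopDocs();
        }
    }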

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestWildcard.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestWildcard.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestWildcard.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestWildcard.cs Tue Nov 15 08:41:44 2011
@@ -194,7 +194,7 @@ namespace Lucene.Net.Search
 		{
 			System.String field = "content";
 			bool dbg = false;
-			QueryParser qp = new QueryParser(field, new WhitespaceAnalyzer());
+			QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer());
 			qp.SetAllowLeadingWildcard(true);
 			System.String[] docs = new System.String[]{"\\ abcdefg1", "\\79 hijklmn1", "\\\\ opqrstu1"};
 			// queries that should find all docs
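
The QueryParser constructor above now takes an explicit Version. A minimal sketch of building a leading-wildcard query the same way (the field name and query text are placeholders):

    using Lucene.Net.Analysis;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;

    public static class WildcardParserSketch
    {
        public static Query ParseLeadingWildcard(string field, string queryText)
        {
            // The Version argument is required by the updated constructor.
            QueryParser qp = new QueryParser(Lucene.Net.Util.Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer());
            qp.SetAllowLeadingWildcard(true);
            return qp.Parse(queryText);
        }
    }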

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/MockRAMDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/MockRAMDirectory.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/MockRAMDirectory.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/MockRAMDirectory.cs Tue Nov 15 08:41:44 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using NUnit.Framework;
 
 namespace Lucene.Net.Store
@@ -25,9 +25,6 @@ namespace Lucene.Net.Store
     /// <summary> This is a subclass of RAMDirectory that adds methods
     /// intended to be used only by unit tests.
     /// </summary>
-    /// <version>  $Id: RAMDirectory.java 437897 2006-08-29 01:13:10Z yonik $
-    /// </version>
-
     [Serializable]
     public class MockRAMDirectory : RAMDirectory
     {
@@ -39,19 +36,19 @@ namespace Lucene.Net.Store
         Random randomState;
         internal bool noDeleteOpenFile = true;
         internal bool preventDoubleWrite = true;
-        private System.Collections.Hashtable unSyncedFiles;
-        private System.Collections.Hashtable createdFiles;
+        private ISet<string> unSyncedFiles;
+        private ISet<string> createdFiles;
         internal volatile bool crashed;
 
         // NOTE: we cannot initialize the Map here due to the
         // order in which our constructor actually does this
         // member initialization vs when it calls super.  It seems
         // like super is called, then our members are initialized:
-        internal System.Collections.IDictionary openFiles;
+        internal IDictionary<string, int> openFiles;
 
         // Only tracked if noDeleteOpenFile is true: if an attempt
         // is made to delete an open file, we enroll it here.
-        internal System.Collections.Hashtable openFilesDeleted;
+        internal ISet<string> openFilesDeleted;
 
         private void Init()
         {
@@ -59,14 +56,14 @@ namespace Lucene.Net.Store
             {
                 if (openFiles == null)
                 {
-                    openFiles = new System.Collections.Hashtable();
-                    openFilesDeleted = new System.Collections.Hashtable();
+                    openFiles = new Dictionary<string, int>();
+                    openFilesDeleted = new HashSet<string>();
                 }
 
                 if (createdFiles == null)
-                    createdFiles = new System.Collections.Hashtable();
+                    createdFiles = new HashSet<string>();
                 if (unSyncedFiles == null)
-                    unSyncedFiles = new System.Collections.Hashtable();
+                    unSyncedFiles = new HashSet<string>();
             }
         }
 
@@ -75,21 +72,11 @@ namespace Lucene.Net.Store
         {
             Init();
         }
-        public MockRAMDirectory(String dir)
-            : base(dir)
-        {
-            Init();
-        }
         public MockRAMDirectory(Directory dir)
             : base(dir)
         {
             Init();
         }
-        public MockRAMDirectory(System.IO.FileInfo dir)
-            : base(dir)
-        {
-            Init();
-        }
 
         /** If set to true, we throw an IOException if the same
          *  file is opened by createOutput, ever. */
@@ -117,16 +104,15 @@ namespace Lucene.Net.Store
             lock (this)
             {
                 crashed = true;
-                openFiles = new System.Collections.Hashtable();
-                openFilesDeleted = new System.Collections.Hashtable();
-                System.Collections.IEnumerator it = unSyncedFiles.GetEnumerator();
-                unSyncedFiles = new System.Collections.Hashtable();
+                openFiles = new Dictionary<string, int>();
+                openFilesDeleted = new HashSet<string>();
+                var it = unSyncedFiles.GetEnumerator();
+                unSyncedFiles = new HashSet<string>();
                 int count = 0;
                 while (it.MoveNext())
                 {
-
-                    string name = (string)((System.Collections.DictionaryEntry)it.Current).Key;
-                    RAMFile file = (RAMFile)fileMap[name];
+                    string name = it.Current;
+                    RAMFile file = fileMap[name];
                     if (count % 3 == 0)
                     {
                         DeleteFile(name, true);
@@ -244,9 +230,9 @@ namespace Lucene.Net.Store
                     unSyncedFiles.Remove(name);
                 if (!forced && noDeleteOpenFile)
                 {
-                    if (openFiles.Contains(name))
+                    if (openFiles.ContainsKey(name))
                     {
-                        openFilesDeleted[name]=name;
+                        openFilesDeleted.Add(name);
                         throw new System.IO.IOException("MockRAMDirectory: file \"" + name + "\" is still open: cannot delete");
                     }
                     else
@@ -258,11 +244,11 @@ namespace Lucene.Net.Store
             }
         }
 
-        public System.Collections.IDictionary GetOpenDeletedFiles()
+        public ISet<string> GetOpenDeletedFiles()
         {
             lock (this)
             {
-                return new System.Collections.Hashtable(openFilesDeleted);
+                return new HashSet<string>(openFilesDeleted);
             }
         }
 
@@ -275,14 +261,14 @@ namespace Lucene.Net.Store
                 Init();
                 if (preventDoubleWrite && createdFiles.Contains(name) && !name.Equals("segments.gen"))
                     throw new System.IO.IOException("file \"" + name + "\" was already written to");
-                if (noDeleteOpenFile && openFiles.Contains(name))
+                if (noDeleteOpenFile && openFiles.ContainsKey(name))
                     throw new System.IO.IOException("MockRAMDirectory: file \"" + name + "\" is still open: cannot overwrite");
                 RAMFile file = new RAMFile(this);
                 if (crashed)
                     throw new System.IO.IOException("cannot createOutput after crash");
-                unSyncedFiles[name]=name;
-                createdFiles[name]=name;
-                RAMFile existing = (RAMFile)fileMap[name];
+                unSyncedFiles.Add(name);
+                createdFiles.Add(name);
+                RAMFile existing = fileMap[name];
                 // Enforce write once:
                 if (existing != null && !name.Equals("segments.gen") && preventDoubleWrite)
                     throw new System.IO.IOException("file " + name + " already exists");
@@ -305,20 +291,20 @@ namespace Lucene.Net.Store
         {
             lock (this)
             {
-                RAMFile file = (RAMFile)fileMap[name];
+                RAMFile file = fileMap[name];
                 if (file == null)
                     throw new System.IO.FileNotFoundException(name);
                 else
                 {
-                    if (openFiles.Contains(name))
+                    if (openFiles.ContainsKey(name))
                     {
-                        int v = (int)openFiles[name]; 
-                        v = (System.Int32)(v + 1);
-                        openFiles[name]= v;
+                        int v = openFiles[name]; 
+                        v = v + 1;
+                        openFiles[name] = v;
                     }
                     else
                     {
-                        openFiles[name]=1;
+                        openFiles[name] = 1;
                     }
                 }
                 return new MockRAMInputStream(this, name, file);
@@ -331,9 +317,8 @@ namespace Lucene.Net.Store
             lock (this)
             {
                 long size = 0;
-                System.Collections.IEnumerator it = fileMap.Values.GetEnumerator();
-                while (it.MoveNext())
-                    size += ((RAMFile)it.Current).GetSizeInBytes();
+                foreach(RAMFile file in fileMap.Values)
+                    size += file.GetSizeInBytes();
                 return size;
             }
         }
@@ -349,9 +334,8 @@ namespace Lucene.Net.Store
             lock (this)
             {
                 long size = 0;
-                System.Collections.IEnumerator it = fileMap.Values.GetEnumerator();
-                while (it.MoveNext())
-                    size += ((RAMFile)it.Current).length_ForNUnit;
+                foreach(RAMFile file in fileMap.Values)
+                    size += file.length;
                 return size;
             }
         }
@@ -362,8 +346,8 @@ namespace Lucene.Net.Store
             {
                 if (openFiles == null)
                 {
-                    openFiles = new System.Collections.Hashtable();
-                    openFilesDeleted = new System.Collections.Hashtable();
+                    openFiles = new Dictionary<string, int>();
+                    openFilesDeleted = new HashSet<string>();
                 }
                 if (noDeleteOpenFile && openFiles.Count > 0)
                 {
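
MockRAMDirectory's untyped Hashtables become ISet<string> and IDictionary<string, int>. A minimal sketch of the open-file reference counting this enables, written against plain .NET collections (the field names mirror the ones above, but the class itself is illustrative):

    using System.Collections.Generic;

    internal sealed class OpenFileCounts
    {
        private readonly IDictionary<string, int> openFiles = new Dictionary<string, int>();
        private readonly ISet<string> createdFiles = new HashSet<string>();

        public void FileCreated(string name)
        {
            // ISet<string>.Add is idempotent, so repeated creates are harmless to track.
            createdFiles.Add(name);
        }

        public void InputOpened(string name)
        {
            int count;
            // Typed dictionaries use ContainsKey/TryGetValue instead of null checks.
            openFiles[name] = openFiles.TryGetValue(name, out count) ? count + 1 : 1;
        }

        public void InputClosed(string name)
        {
            int count;
            if (!openFiles.TryGetValue(name, out count))
                return;
            if (count == 1)
                openFiles.Remove(name);
            else
                openFiles[name] = count - 1;
        }
    }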

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/MockRAMInputStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/MockRAMInputStream.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/MockRAMInputStream.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/MockRAMInputStream.cs Tue Nov 15 08:41:44 2011
@@ -53,14 +53,14 @@ namespace Lucene.Net.Store
 					// Could be null when MockRAMDirectory.crash() was called
 					if (dir.openFiles[name] != null)
 					{
-						System.Int32 v = (System.Int32) dir.openFiles[name];
+						System.Int32 v = dir.openFiles[name];
 						if (v == 1)
 						{
 							dir.openFiles.Remove(name);
 						}
 						else
 						{
-							v = (System.Int32) (v - 1);
+							v = v - 1;
 							dir.openFiles[name] = v;
 						}
 					}

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/TestBufferedIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/TestBufferedIndexInput.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/TestBufferedIndexInput.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/TestBufferedIndexInput.cs Tue Nov 15 08:41:44 2011
@@ -106,7 +106,8 @@ namespace Lucene.Net.Store
 			// run test with chunk size of 10 bytes
 			RunReadBytesAndClose(new SimpleFSIndexInput(tmpInputFile, inputBufferSize, 10), inputBufferSize, r);
 			// run test with chunk size of 100 MB - default
-			RunReadBytesAndClose(new SimpleFSIndexInput(tmpInputFile, inputBufferSize), inputBufferSize, r);
+			RunReadBytesAndClose(new SimpleFSIndexInput(tmpInputFile, inputBufferSize, FSDirectory.DEFAULT_READ_CHUNK_SIZE), inputBufferSize, r);
+            Assert.Pass("Suppressing Sub-Tests on NIOFSIndexInput classes");
 			// run test with chunk size of 10 bytes
 			//RunReadBytesAndClose(new NIOFSIndexInput(tmpInputFile, inputBufferSize, 10), inputBufferSize, r);    // {{Aroush-2.9}} suppressing this test since NIOFSIndexInput isn't ported
             System.Console.Out.WriteLine("Suppressing sub-test: 'RunReadBytesAndClose(new NIOFSIndexInput(tmpInputFile, inputBufferSize, 10), inputBufferSize, r);' since NIOFSIndexInput isn't ported");
@@ -284,7 +285,7 @@ namespace Lucene.Net.Store
 		[Test]
 		public virtual void  TestSetBufferSize()
 		{
-			System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), "testSetBufferSize"));
+			System.IO.DirectoryInfo indexDir = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), "testSetBufferSize"));
 			MockFSDirectory dir = new MockFSDirectory(indexDir, NewRandom());
 			try
 			{
@@ -301,7 +302,7 @@ namespace Lucene.Net.Store
 				
 				dir.allIndexInputs.Clear();
 				
-				IndexReader reader = IndexReader.Open(dir);
+				IndexReader reader = IndexReader.Open(dir, false);
 				Term aaa = new Term("content", "aaa");
 				Term bbb = new Term("content", "bbb");
 				Term ccc = new Term("content", "ccc");
@@ -342,7 +343,7 @@ namespace Lucene.Net.Store
 			
 			private Directory dir;
 			
-			public MockFSDirectory(System.IO.FileInfo path, System.Random rand)
+			public MockFSDirectory(System.IO.DirectoryInfo path, System.Random rand)
 			{
 				this.rand = rand;
 				lockFactory = new NoLockFactory();
@@ -411,10 +412,6 @@ namespace Lucene.Net.Store
 			{
 				return dir.FileExists(name);
 			}
-			public override System.String[] List()
-			{
-				return dir.List();
-			}
 			public override System.String[] ListAll()
 			{
 				return dir.ListAll();
@@ -424,10 +421,6 @@ namespace Lucene.Net.Store
 			{
 				return dir.FileLength(name);
 			}
-			public override void  RenameFile(System.String from, System.String to)
-			{
-				dir.RenameFile(from, to);
-			}
 		}
 	}
 }
\ No newline at end of file
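
TestBufferedIndexInput.cs now opens its reader with an explicit read-only flag and passes directories as DirectoryInfo. A minimal end-to-end sketch of the IndexReader.Open(dir, false) call against an in-memory directory (the document content is illustrative):

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public static class ReaderOpenSketch
    {
        public static IndexReader OpenWritableReader()
        {
            Directory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

            Document doc = new Document();
            doc.Add(new Field("content", "aaa bbb ccc", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Close();

            // false requests a writable (non read-only) reader, matching the test above.
            return IndexReader.Open(dir, false);
        }
    }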