Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC

svn commit: r832486 [26/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestTimeLimitedCollector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTimeLimitedCollector.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTimeLimitedCollector.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTimeLimitedCollector.cs Tue Nov  3 18:06:27 2009
@@ -1,20 +1,24 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
+using System;
+
+using NUnit.Framework;
+
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
@@ -25,400 +29,367 @@
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
-using BitSet = SupportClass.CollectionsSupport.BitSet;
-using Thread = SupportClass.ThreadClass;
-
-using Exception = System.Exception;
-using InterruptedException = System.Threading.ThreadInterruptedException;
-using IOException = System.IO.IOException;
-using String = System.String;
-
-using NUnit.Framework;
-
 namespace Lucene.Net.Search
 {
-
-
-    /**
-     * Tests the TimeLimitedCollector.  This test checks (1) search
-     * correctness (regardless of timeout), (2) expected timeout behavior,
-     * and (3) a sanity test with multiple searching threads.
-     */
-    [TestFixture()]
-    public class TestTimeLimitedCollector : LuceneTestCase
-    {
-        private static readonly int SLOW_DOWN = 47;
-        private static readonly long TIME_ALLOWED = 17 * SLOW_DOWN; // so searches can find about 17 docs.
-
-        // max time allowed is relaxed for multithreading tests. 
-        // the multithread case fails when setting this to 1 (no slack) and launching many threads (>2000).  
-        // but this is not a real failure, just noise.
-        private static readonly long MULTI_THREAD_SLACK = 7;
-
-        private static readonly int N_DOCS = 3000;
-        private static readonly int N_THREADS = 50;
-
-        private Searcher searcher;
-        private readonly String FIELD_NAME = "body";
-        private Query query;
-
-        public TestTimeLimitedCollector()
-        {
-        }
-
-        /**
-         * initializes searcher with a document set
-         */
-        [TestFixtureSetUp()]
-        protected void setUp()
-        {
-            String[] docText = {
-                "docThatNeverMatchesSoWeCanRequireLastDocCollectedToBeGreaterThanZero",
-                "one blah three",
-                "one foo three multiOne",
-                "one foobar three multiThree",
-                "blueberry pancakes",
-                "blueberry pie",
-                "blueberry strudel",
-                "blueberry pizza",
-            };
-            Directory directory = new RAMDirectory();
-            IndexWriter iw = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);
-
-            for (int i = 0; i < N_DOCS; i++)
-            {
-                add(docText[i % docText.Length], iw);
-            }
-            iw.Close();
-            searcher = new IndexSearcher(directory);
-
-            String qtxt = "one";
-            for (int i = 0; i < docText.Length; i++)
-            {
-                qtxt += ' ' + docText[i]; // large query so that search will be longer
-            }
-            QueryParser queryParser = new QueryParser(FIELD_NAME, new WhitespaceAnalyzer());
-            query = queryParser.Parse(qtxt);
-
-            // warm the searcher
-            searcher.Search(query, null, 1000);
-        }
-
-        [TestFixtureTearDown()]
-        public void tearDown()
-        {
-            searcher.Close();
-        }
-
-        private void add(String value, IndexWriter iw)
-        {
-            Document d = new Document();
-            d.Add(new Field(FIELD_NAME, value, Field.Store.NO, Field.Index.ANALYZED));
-            iw.AddDocument(d);
-        }
-
-        private void search(HitCollector collector)
-        {
-            searcher.Search(query, collector);
-        }
-
-        /**
-         * test search correctness with no timeout
-         */
-        [Test]
-        public void testSearch()
-        {
-            doTestSearch();
-        }
-
-        private void doTestSearch()
-        {
-            int totalResults = 0;
-            int totalTLCResults = 0;
-            try
-            {
-                MyHitCollector myHc = new MyHitCollector();
-                search(myHc);
-                totalResults = myHc.hitCount();
-
-                myHc = new MyHitCollector();
-                long oneHour = 3600000;
-                HitCollector tlCollector = createTimedCollector(myHc, oneHour, false);
-                search(tlCollector);
-                totalTLCResults = myHc.hitCount();
-            }
-            catch (Exception e)
-            {
-                Assert.IsTrue(false, "Unexpected exception: " + e); //==fail
-            }
-            Assert.AreEqual(totalResults, totalTLCResults, "Wrong number of results!");
-        }
-
-        private HitCollector createTimedCollector(MyHitCollector hc, long timeAllowed, bool greedy)
-        {
-            TimeLimitedCollector res = new TimeLimitedCollector(hc, timeAllowed);
-            res.setGreedy(greedy); // set to true to make sure at least one doc is collected.
-            return res;
-        }
-
-        /**
-         * Test that timeout is obtained, and soon enough!
-         */
-        [Test]
-        public void testTimeoutGreedy()
-        {
-            doTestTimeout(false, true);
-        }
-
-        /**
-         * Test that timeout is obtained, and soon enough!
-         */
-        [Test]
-        public void testTimeoutNotGreedy()
-        {
-            doTestTimeout(false, false);
-        }
-
-        private void doTestTimeout(bool multiThreaded, bool greedy)
-        {
-            // setup
-            MyHitCollector myHc = new MyHitCollector();
-            myHc.setSlowDown(SLOW_DOWN);
-            HitCollector tlCollector = createTimedCollector(myHc, TIME_ALLOWED, greedy);
-
-            // search
-            TimeLimitedCollector.TimeExceededException timoutException = null;
-            try
-            {
-                search(tlCollector);
-            }
-            catch (TimeLimitedCollector.TimeExceededException x)
-            {
-                timoutException = x;
-            }
-            catch (Exception e)
-            {
-                Assert.IsTrue(false, "Unexpected exception: " + e); //==fail
-            }
-
-            // must get exception
-            Assert.IsNotNull(timoutException, "Timeout expected!");
-
-            // greediness affect last doc collected
-            int exceptionDoc = timoutException.getLastDocCollected();
-            int lastCollected = myHc.getLastDocCollected();
-            Assert.IsTrue(exceptionDoc > 0, "doc collected at timeout must be > 0!");
-            if (greedy)
-            {
-                Assert.IsTrue(exceptionDoc == lastCollected, "greedy=" + greedy + " exceptionDoc=" + exceptionDoc + " != lastCollected=" + lastCollected);
-                Assert.IsTrue(myHc.hitCount() > 0, "greedy, but no hits found!");
-            }
-            else
-            {
-                Assert.IsTrue(exceptionDoc > lastCollected, "greedy=" + greedy + " exceptionDoc=" + exceptionDoc + " not > lastCollected=" + lastCollected);
-            }
-
-            // verify that elapsed time at exception is within valid limits
-            Assert.AreEqual(timoutException.getTimeAllowed(), TIME_ALLOWED);
-            // a) Not too early
-            Assert.IsTrue(timoutException.getTimeElapsed() > TIME_ALLOWED - TimeLimitedCollector.getResolution(),
-                "elapsed=" + timoutException.getTimeElapsed() + " <= (allowed-resolution)=" + (TIME_ALLOWED - TimeLimitedCollector.getResolution())
-                );
-            // b) Not too late.
-            //    This part is problematic in a busy test system, so we just print a warning.
-            //    We already verified that a timeout occurred, we just can't be picky about how long it took.
-            if (timoutException.getTimeElapsed() > maxTime(multiThreaded))
-            {
-                System.Console.Out.WriteLine("Informative: timeout exceeded (no action required: most probably just " +
-                  " because the test machine is slower than usual):  " +
-                  "lastDoc=" + exceptionDoc +
-                  " ,&& allowed=" + timoutException.getTimeAllowed() +
-                  " ,&& elapsed=" + timoutException.getTimeElapsed() +
-                  " >= " + maxTimeStr(multiThreaded));
-            }
-        }
-
-        private long maxTime(bool multiThreaded)
-        {
-            long res = 2 * TimeLimitedCollector.getResolution() + TIME_ALLOWED + SLOW_DOWN; // some slack for less noise in this test
-            if (multiThreaded)
-            {
-                res *= MULTI_THREAD_SLACK; // larger slack  
-            }
-            return res;
-        }
-
-        private String maxTimeStr(bool multiThreaded)
-        {
-            String s =
-              "( " +
-              "2*resolution +  TIME_ALLOWED + SLOW_DOWN = " +
-              "2*" + TimeLimitedCollector.getResolution() + " + " + TIME_ALLOWED + " + " + SLOW_DOWN +
-              ")";
-            if (multiThreaded)
-            {
-                s = MULTI_THREAD_SLACK + " * " + s;
-            }
-            return maxTime(multiThreaded) + " = " + s;
-        }
-
-        /**
-         * Test timeout behavior when resolution is modified. 
-         */
-        [Test]
-        public void testModifyResolution()
-        {
-            try
-            {
-                // increase and test
-                uint resolution = 20 * TimeLimitedCollector.DEFAULT_RESOLUTION; //400
-                //TimeLimitedCollector.setResolution(resolution);
-                //Assert.AreEqual(resolution, TimeLimitedCollector.getResolution());
-                doTestTimeout(false, true);
-                // decrease much and test
-                resolution = 5;
-                //TimeLimitedCollector.setResolution(resolution);
-                //Assert.AreEqual(resolution, TimeLimitedCollector.getResolution());
-                doTestTimeout(false, true);
-                // return to default and test
-                resolution = TimeLimitedCollector.DEFAULT_RESOLUTION;
-                //TimeLimitedCollector.setResolution(resolution);
-                //Assert.AreEqual(resolution, TimeLimitedCollector.getResolution());
-                doTestTimeout(false, true);
-            }
-            finally
-            {
-                TimeLimitedCollector.setResolution(TimeLimitedCollector.DEFAULT_RESOLUTION);
-            }
-        }
-
-        /** 
-         * Test correctness with multiple searching threads.
-         */
-        [Test]
-        public void testSearchMultiThreaded()
-        {
-            doTestMultiThreads(false);
-        }
-
-        /** 
-         * Test correctness with multiple searching threads.
-         */
-        [Test]
-        public void testTimeoutMultiThreaded()
-        {
-            doTestMultiThreads(true);
-        }
-
-        internal class AnonymousClassThread : Thread
-        {
-            private TestTimeLimitedCollector enclosingInstance;
-            private BitSet success;
-            private bool withTimeout;
-            private int num;
-
-            internal AnonymousClassThread(TestTimeLimitedCollector enclosingInstance, BitSet success, bool withTimeout, int num)
-                : base()
-            {
-                this.enclosingInstance = enclosingInstance;
-                this.success = success;
-                this.withTimeout = withTimeout;
-                this.num = num;
-            }
-
-            override public void Run()
-            {
-                if (withTimeout)
-                {
-                    enclosingInstance.doTestTimeout(true, true);
-                }
-                else
-                {
-                    enclosingInstance.doTestSearch();
-                }
-                lock (success)
-                {
-                    success.Set(num);
-                }
-            }
-        }
-
-        private void doTestMultiThreads(bool withTimeout)
-        {
-            Thread[] threadArray = new Thread[N_THREADS];
-            BitSet success = new BitSet(N_THREADS);
-            for (int i = 0; i < threadArray.Length; ++i)
-            {
-                int num = i;
-                threadArray[num] = new AnonymousClassThread(this, success, withTimeout, num);
-            }
-            for (int i = 0; i < threadArray.Length; ++i)
-            {
-                threadArray[i].Start();
-            }
-            bool interrupted = false;
-            for (int i = 0; i < threadArray.Length; ++i)
-            {
-                try
-                {
-                    threadArray[i].Join();
-                }
-                catch (InterruptedException)
-                {
-                    interrupted = true;
-                }
-            }
-            if (interrupted)
-            {
-                Thread.CurrentThread().Interrupt();
-            }
-            Assert.AreEqual(N_THREADS, success.Cardinality(), "some threads failed!");
-        }
-
-        // counting hit collector that can slow down at collect().
-        private class MyHitCollector : HitCollector
-        {
-            private readonly BitSet bits = new BitSet();
-            private int slowdown = 0;
-            private int lastDocCollected = -1;
-
-            /**
-             * amount of time to wait on each collect to simulate a long iteration
-             */
-            public void setSlowDown(int milliseconds)
-            {
-                slowdown = milliseconds;
-            }
-
-            override public void Collect(int doc, float score)
-            {
-                if (slowdown > 0)
-                {
-                    try
-                    {
-                        Thread.Sleep(slowdown);
-                    }
-                    catch (InterruptedException x)
-                    {
-                        System.Console.Out.WriteLine("caught " + x);
-                    }
-                }
-                bits.Set(doc);
-                lastDocCollected = doc;
-            }
-
-            public int hitCount()
-            {
-                return bits.Cardinality();
-            }
-
-            public int getLastDocCollected()
-            {
-                return lastDocCollected;
-            }
-
-        }
-
-    }
-
-}
+	
+	/// <summary> Tests the TimeLimitedCollector.  This test checks (1) search
+	/// correctness (regardless of timeout), (2) expected timeout behavior,
+	/// and (3) a sanity test with multiple searching threads.
+	/// </summary>
+    [TestFixture]
+	public class TestTimeLimitedCollector:LuceneTestCase
+	{
+		private class AnonymousClassThread:SupportClass.ThreadClass
+		{
+			public AnonymousClassThread(bool withTimeout, System.Collections.BitArray success, int num, TestTimeLimitedCollector enclosingInstance)
+			{
+				InitBlock(withTimeout, success, num, enclosingInstance);
+			}
+			private void  InitBlock(bool withTimeout, System.Collections.BitArray success, int num, TestTimeLimitedCollector enclosingInstance)
+			{
+				this.withTimeout = withTimeout;
+				this.success = success;
+				this.num = num;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private bool withTimeout;
+			private System.Collections.BitArray success;
+			private int num;
+			private TestTimeLimitedCollector enclosingInstance;
+			public TestTimeLimitedCollector Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			override public void  Run()
+			{
+				if (withTimeout)
+				{
+					Enclosing_Instance.DoTestTimeout(true, true);
+				}
+				else
+				{
+					Enclosing_Instance.DoTestSearch();
+				}
+				lock (success.SyncRoot)
+				{
+					success.Set(num, true);
+				}
+			}
+		}
+		private const int SLOW_DOWN = 47;
+		private static readonly long TIME_ALLOWED = 17 * SLOW_DOWN; // so searches can find about 17 docs.
+		
+		// max time allowed is relaxed for multithreading tests. 
+		// the multithread case fails when setting this to 1 (no slack) and launching many threads (>2000).  
+		// but this is not a real failure, just noise.
+		private const double MULTI_THREAD_SLACK = 7;
+		
+		private const int N_DOCS = 3000;
+		private const int N_THREADS = 50;
+		
+		private Searcher searcher;
+		private System.String FIELD_NAME = "body";
+		private Query query;
+		
+		public TestTimeLimitedCollector(System.String name):base(name)
+		{
+		}
+		
+		/// <summary> initializes searcher with a document set</summary>
+		[Test]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			System.String[] docText = new System.String[]{"docThatNeverMatchesSoWeCanRequireLastDocCollectedToBeGreaterThanZero", "one blah three", "one foo three multiOne", "one foobar three multiThree", "blueberry pancakes", "blueberry pie", "blueberry strudel", "blueberry pizza"};
+			Directory directory = new RAMDirectory();
+			IndexWriter iw = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);
+			
+			for (int i = 0; i < N_DOCS; i++)
+			{
+				Add(docText[i % docText.Length], iw);
+			}
+			iw.Close();
+			searcher = new IndexSearcher(directory);
+			
+			System.String qtxt = "one";
+			for (int i = 0; i < docText.Length; i++)
+			{
+				qtxt += (' ' + docText[i]); // large query so that search will be longer
+			}
+			QueryParser queryParser = new QueryParser(FIELD_NAME, new WhitespaceAnalyzer());
+			query = queryParser.Parse(qtxt);
+			
+			// warm the searcher
+			searcher.Search(query, null, 1000);
+		}
+		
+		[TearDown]
+		public override void  TearDown()
+		{
+			searcher.Close();
+			base.TearDown();
+		}
+		
+		private void  Add(System.String value_Renamed, IndexWriter iw)
+		{
+			Document d = new Document();
+			d.Add(new Field(FIELD_NAME, value_Renamed, Field.Store.NO, Field.Index.ANALYZED));
+			iw.AddDocument(d);
+		}
+		
+		private void  Search(HitCollector collector)
+		{
+			searcher.Search(query, collector);
+		}
+		
+		/// <summary> test search correctness with no timeout</summary>
+		[Test]
+		public virtual void  TestSearch()
+		{
+			DoTestSearch();
+		}
+		
+		private void  DoTestSearch()
+		{
+			int totalResults = 0;
+			int totalTLCResults = 0;
+			try
+			{
+				MyHitCollector myHc = new MyHitCollector(this);
+				Search(myHc);
+				totalResults = myHc.HitCount();
+				
+				myHc = new MyHitCollector(this);
+				long oneHour = 3600000;
+				HitCollector tlCollector = CreateTimedCollector(myHc, oneHour, false);
+				Search(tlCollector);
+				totalTLCResults = myHc.HitCount();
+			}
+			catch (System.Exception e)
+			{
+				System.Console.Error.WriteLine(e.StackTrace);
+				Assert.IsTrue(false, "Unexpected exception: " + e); //==fail
+			}
+			Assert.AreEqual(totalResults, totalTLCResults, "Wrong number of results!");
+		}
+		
+		private HitCollector CreateTimedCollector(MyHitCollector hc, long timeAllowed, bool greedy)
+		{
+			TimeLimitedCollector res = new TimeLimitedCollector(hc, timeAllowed);
+			res.SetGreedy(greedy); // set to true to make sure at least one doc is collected.
+			return res;
+		}
+		
+		/// <summary> Test that timeout is obtained, and soon enough!</summary>
+		[Test]
+		public virtual void  TestTimeoutGreedy()
+		{
+			DoTestTimeout(false, true);
+		}
+		
+		/// <summary> Test that timeout is obtained, and soon enough!</summary>
+		[Test]
+		public virtual void  TestTimeoutNotGreedy()
+		{
+			DoTestTimeout(false, false);
+		}
+		
+		private void  DoTestTimeout(bool multiThreaded, bool greedy)
+		{
+			// setup
+			MyHitCollector myHc = new MyHitCollector(this);
+			myHc.SetSlowDown(SLOW_DOWN);
+			HitCollector tlCollector = CreateTimedCollector(myHc, TIME_ALLOWED, greedy);
+			
+			// search
+			TimeLimitedCollector.TimeExceededException timoutException = null;
+			try
+			{
+				Search(tlCollector);
+			}
+			catch (TimeLimitedCollector.TimeExceededException x)
+			{
+				timoutException = x;
+			}
+			catch (System.Exception e)
+			{
+				Assert.IsTrue(false, "Unexpected exception: " + e); //==fail
+			}
+			
+			// must get exception
+			Assert.IsNotNull(timoutException, "Timeout expected!");
+			
+			// greediness affect last doc collected
+			int exceptionDoc = timoutException.GetLastDocCollected();
+			int lastCollected = myHc.GetLastDocCollected();
+			Assert.IsTrue(exceptionDoc > 0, "doc collected at timeout must be > 0!");
+			if (greedy)
+			{
+				Assert.IsTrue(exceptionDoc == lastCollected, "greedy=" + greedy + " exceptionDoc=" + exceptionDoc + " != lastCollected=" + lastCollected);
+				Assert.IsTrue(myHc.HitCount() > 0, "greedy, but no hits found!");
+			}
+			else
+			{
+				Assert.IsTrue(exceptionDoc > lastCollected, "greedy=" + greedy + " exceptionDoc=" + exceptionDoc + " not > lastCollected=" + lastCollected);
+			}
+			
+			// verify that elapsed time at exception is within valid limits
+			Assert.AreEqual(timoutException.GetTimeAllowed(), TIME_ALLOWED);
+			// a) Not too early
+			Assert.IsTrue(timoutException.GetTimeElapsed() > TIME_ALLOWED - TimeLimitedCollector.GetResolution(), "elapsed=" + timoutException.GetTimeElapsed() + " <= (allowed-resolution)=" + (TIME_ALLOWED - TimeLimitedCollector.GetResolution()));
+			// b) Not too late.
+			//    This part is problematic in a busy test system, so we just print a warning.
+			//    We already verified that a timeout occurred, we just can't be picky about how long it took.
+			if (timoutException.GetTimeElapsed() > MaxTime(multiThreaded))
+			{
+				System.Console.Out.WriteLine("Informative: timeout exceeded (no action required: most probably just " + " because the test machine is slower than usual):  " + "lastDoc=" + exceptionDoc + " ,&& allowed=" + timoutException.GetTimeAllowed() + " ,&& elapsed=" + timoutException.GetTimeElapsed() + " >= " + MaxTimeStr(multiThreaded));
+			}
+		}
+		
+		private long MaxTime(bool multiThreaded)
+		{
+			long res = 2 * TimeLimitedCollector.GetResolution() + TIME_ALLOWED + SLOW_DOWN; // some slack for less noise in this test
+			if (multiThreaded)
+			{
+				res = (long) (res * MULTI_THREAD_SLACK); // larger slack  
+			}
+			return res;
+		}
+		
+		private System.String MaxTimeStr(bool multiThreaded)
+		{
+			System.String s = "( " + "2*resolution +  TIME_ALLOWED + SLOW_DOWN = " + "2*" + TimeLimitedCollector.GetResolution() + " + " + TIME_ALLOWED + " + " + SLOW_DOWN + ")";
+			if (multiThreaded)
+			{
+				s = MULTI_THREAD_SLACK + " * " + s;
+			}
+			return MaxTime(multiThreaded) + " = " + s;
+		}
+		
+		/// <summary> Test timeout behavior when resolution is modified. </summary>
+		[Test]
+		public virtual void  TestModifyResolution()
+		{
+			try
+			{
+				// increase and test
+				uint resolution = 20 * TimeLimitedCollector.DEFAULT_RESOLUTION; //400
+				TimeLimitedCollector.SetResolution(resolution);
+				Assert.AreEqual(resolution, TimeLimitedCollector.GetResolution());
+				DoTestTimeout(false, true);
+				// decrease much and test
+				resolution = 5;
+				TimeLimitedCollector.SetResolution(resolution);
+				Assert.AreEqual(resolution, TimeLimitedCollector.GetResolution());
+				DoTestTimeout(false, true);
+				// return to default and test
+				resolution = TimeLimitedCollector.DEFAULT_RESOLUTION;
+				TimeLimitedCollector.SetResolution(resolution);
+				Assert.AreEqual(resolution, TimeLimitedCollector.GetResolution());
+				DoTestTimeout(false, true);
+			}
+			finally
+			{
+				TimeLimitedCollector.SetResolution(TimeLimitedCollector.DEFAULT_RESOLUTION);
+			}
+		}
+		
+		/// <summary> Test correctness with multiple searching threads.</summary>
+		[Test]
+		public virtual void  TestSearchMultiThreaded()
+		{
+			DoTestMultiThreads(false);
+		}
+		
+		/// <summary> Test correctness with multiple searching threads.</summary>
+		[Test]
+		public virtual void  TestTimeoutMultiThreaded()
+		{
+			DoTestMultiThreads(true);
+		}
+		
+		private void  DoTestMultiThreads(bool withTimeout)
+		{
+			SupportClass.ThreadClass[] threadArray = new SupportClass.ThreadClass[N_THREADS];
+			System.Collections.BitArray success = new System.Collections.BitArray((N_THREADS % 64 == 0?N_THREADS / 64:N_THREADS / 64 + 1) * 64);
+			for (int i = 0; i < threadArray.Length; ++i)
+			{
+				int num = i;
+				threadArray[num] = new AnonymousClassThread(withTimeout, success, num, this);
+			}
+			for (int i = 0; i < threadArray.Length; ++i)
+			{
+				threadArray[i].Start();
+			}
+			for (int i = 0; i < threadArray.Length; ++i)
+			{
+				threadArray[i].Join();
+			}
+			Assert.AreEqual(N_THREADS, SupportClass.BitSetSupport.Cardinality(success), "some threads failed!");
+		}
+		
+		// counting hit collector that can slow down at collect().
+		private class MyHitCollector:HitCollector
+		{
+			public MyHitCollector(TestTimeLimitedCollector enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestTimeLimitedCollector enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTimeLimitedCollector enclosingInstance;
+			public TestTimeLimitedCollector Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private System.Collections.BitArray bits = new System.Collections.BitArray(64);
+			private int slowdown = 0;
+			private int lastDocCollected = - 1;
+			
+			/// <summary> amount of time to wait on each collect to simulate a long iteration</summary>
+			public virtual void  SetSlowDown(int milliseconds)
+			{
+				slowdown = milliseconds;
+			}
+			
+			public override void  Collect(int docId, float score)
+			{
+				if (slowdown > 0)
+				{
+					try
+					{
+						System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * slowdown));
+					}
+					catch (System.Threading.ThreadInterruptedException ie)
+					{
+						SupportClass.ThreadClass.Current().Interrupt();
+						throw new System.SystemException("", ie);
+					}
+				}
+				System.Diagnostics.Debug.Assert(docId >= 0, "doc=" + docId);
+				bits.Set(docId, true);
+				lastDocCollected = docId;
+			}
+			
+			public virtual int HitCount()
+			{
+				return SupportClass.BitSetSupport.Cardinality(bits);
+			}
+			
+			public virtual int GetLastDocCollected()
+			{
+				return lastDocCollected;
+			}
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestTimeLimitingCollector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTimeLimitingCollector.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTimeLimitingCollector.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTimeLimitingCollector.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,414 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using TimeExceededException = Lucene.Net.Search.TimeLimitingCollector.TimeExceededException;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> Tests the {@link TimeLimitingCollector}.  This test checks (1) search
+	/// correctness (regardless of timeout), (2) expected timeout behavior,
+	/// and (3) a sanity test with multiple searching threads.
+	/// </summary>
+    [TestFixture]
+	public class TestTimeLimitingCollector:LuceneTestCase
+	{
+		private class AnonymousClassThread:SupportClass.ThreadClass
+		{
+			public AnonymousClassThread(bool withTimeout, System.Collections.BitArray success, int num, TestTimeLimitingCollector enclosingInstance)
+			{
+				InitBlock(withTimeout, success, num, enclosingInstance);
+			}
+			private void  InitBlock(bool withTimeout, System.Collections.BitArray success, int num, TestTimeLimitingCollector enclosingInstance)
+			{
+				this.withTimeout = withTimeout;
+				this.success = success;
+				this.num = num;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private bool withTimeout;
+			private System.Collections.BitArray success;
+			private int num;
+			private TestTimeLimitingCollector enclosingInstance;
+			public TestTimeLimitingCollector Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			override public void  Run()
+			{
+				if (withTimeout)
+				{
+					Enclosing_Instance.DoTestTimeout(true, true);
+				}
+				else
+				{
+					Enclosing_Instance.DoTestSearch();
+				}
+				lock (success.SyncRoot)
+				{
+					success.Set(num, true);
+				}
+			}
+		}
+		private const int SLOW_DOWN = 47;
+		private static readonly long TIME_ALLOWED = 17 * SLOW_DOWN; // so searches can find about 17 docs.
+		
+		// max time allowed is relaxed for multithreading tests. 
+		// the multithread case fails when setting this to 1 (no slack) and launching many threads (>2000).  
+		// but this is not a real failure, just noise.
+		private const double MULTI_THREAD_SLACK = 7;
+		
+		private const int N_DOCS = 3000;
+		private const int N_THREADS = 50;
+		
+		private Searcher searcher;
+		private System.String FIELD_NAME = "body";
+		private Query query;
+		
+		public TestTimeLimitingCollector(System.String name):base(name)
+		{
+		}
+		
+		/// <summary> initializes searcher with a document set</summary>
+		[Test]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			System.String[] docText = new System.String[]{"docThatNeverMatchesSoWeCanRequireLastDocCollectedToBeGreaterThanZero", "one blah three", "one foo three multiOne", "one foobar three multiThree", "blueberry pancakes", "blueberry pie", "blueberry strudel", "blueberry pizza"};
+			Directory directory = new RAMDirectory();
+			IndexWriter iw = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);
+			
+			for (int i = 0; i < N_DOCS; i++)
+			{
+				Add(docText[i % docText.Length], iw);
+			}
+			iw.Close();
+			searcher = new IndexSearcher(directory);
+			
+			System.String qtxt = "one";
+			for (int i = 0; i < docText.Length; i++)
+			{
+				qtxt += (' ' + docText[i]); // large query so that search will be longer
+			}
+			QueryParser queryParser = new QueryParser(FIELD_NAME, new WhitespaceAnalyzer());
+			query = queryParser.Parse(qtxt);
+			
+			// warm the searcher
+			searcher.Search(query, null, 1000);
+		}
+		
+		[TearDown]
+		public override void  TearDown()
+		{
+			searcher.Close();
+			base.TearDown();
+		}
+		
+		private void  Add(System.String value_Renamed, IndexWriter iw)
+		{
+			Document d = new Document();
+			d.Add(new Field(FIELD_NAME, value_Renamed, Field.Store.NO, Field.Index.ANALYZED));
+			iw.AddDocument(d);
+		}
+		
+		private void  Search(Collector collector)
+		{
+			searcher.Search(query, collector);
+		}
+		
+		/// <summary> test search correctness with no timeout</summary>
+        [Test]
+		public virtual void  TestSearch()
+		{
+			DoTestSearch();
+		}
+		
+		private void  DoTestSearch()
+		{
+			int totalResults = 0;
+			int totalTLCResults = 0;
+			try
+			{
+				MyHitCollector myHc = new MyHitCollector(this);
+				Search(myHc);
+				totalResults = myHc.HitCount();
+				
+				myHc = new MyHitCollector(this);
+				long oneHour = 3600000;
+				Collector tlCollector = CreateTimedCollector(myHc, oneHour, false);
+				Search(tlCollector);
+				totalTLCResults = myHc.HitCount();
+			}
+			catch (System.Exception e)
+			{
+				System.Console.Error.WriteLine(e.StackTrace);
+				Assert.IsTrue(false, "Unexpected exception: " + e); //==fail
+			}
+			Assert.AreEqual(totalResults, totalTLCResults, "Wrong number of results!");
+		}
+		
+		private Collector CreateTimedCollector(MyHitCollector hc, long timeAllowed, bool greedy)
+		{
+			TimeLimitingCollector res = new TimeLimitingCollector(hc, timeAllowed);
+			res.SetGreedy(greedy); // set to true to make sure at least one doc is collected.
+			return res;
+		}
+		
+		/// <summary> Test that timeout is obtained, and soon enough!</summary>
+        [Test]
+		public virtual void  TestTimeoutGreedy()
+		{
+			DoTestTimeout(false, true);
+		}
+		
+		/// <summary> Test that timeout is obtained, and soon enough!</summary>
+        [Test]
+		public virtual void  TestTimeoutNotGreedy()
+		{
+			DoTestTimeout(false, false);
+		}
+		
+		private void  DoTestTimeout(bool multiThreaded, bool greedy)
+		{
+			// setup
+			MyHitCollector myHc = new MyHitCollector(this);
+			myHc.SetSlowDown(SLOW_DOWN);
+			Collector tlCollector = CreateTimedCollector(myHc, TIME_ALLOWED, greedy);
+			
+			// search
+			TimeExceededException timoutException = null;
+			try
+			{
+				Search(tlCollector);
+			}
+			catch (TimeExceededException x)
+			{
+				timoutException = x;
+			}
+			catch (System.Exception e)
+			{
+				Assert.IsTrue(false, "Unexpected exception: " + e); //==fail
+			}
+			
+			// must get exception
+			Assert.IsNotNull(timoutException, "Timeout expected!");
+			
+			// greediness affect last doc collected
+			int exceptionDoc = timoutException.GetLastDocCollected();
+			int lastCollected = myHc.GetLastDocCollected();
+			Assert.IsTrue(exceptionDoc > 0, "doc collected at timeout must be > 0!");
+			if (greedy)
+			{
+				Assert.IsTrue(exceptionDoc == lastCollected, "greedy=" + greedy + " exceptionDoc=" + exceptionDoc + " != lastCollected=" + lastCollected);
+				Assert.IsTrue(myHc.HitCount() > 0, "greedy, but no hits found!");
+			}
+			else
+			{
+				Assert.IsTrue(exceptionDoc > lastCollected, "greedy=" + greedy + " exceptionDoc=" + exceptionDoc + " not > lastCollected=" + lastCollected);
+			}
+			
+			// verify that elapsed time at exception is within valid limits
+			Assert.AreEqual(timoutException.GetTimeAllowed(), TIME_ALLOWED);
+			// a) Not too early
+			Assert.IsTrue(timoutException.GetTimeElapsed() > TIME_ALLOWED - TimeLimitingCollector.GetResolution(), "elapsed=" + timoutException.GetTimeElapsed() + " <= (allowed-resolution)=" + (TIME_ALLOWED - TimeLimitingCollector.GetResolution()));
+			// b) Not too late.
+			//    This part is problematic in a busy test system, so we just print a warning.
+			//    We already verified that a timeout occurred, we just can't be picky about how long it took.
+			if (timoutException.GetTimeElapsed() > MaxTime(multiThreaded))
+			{
+				System.Console.Out.WriteLine("Informative: timeout exceeded (no action required: most probably just " + " because the test machine is slower than usual):  " + "lastDoc=" + exceptionDoc + " ,&& allowed=" + timoutException.GetTimeAllowed() + " ,&& elapsed=" + timoutException.GetTimeElapsed() + " >= " + MaxTimeStr(multiThreaded));
+			}
+		}
+		
+		private long MaxTime(bool multiThreaded)
+		{
+			long res = 2 * TimeLimitingCollector.GetResolution() + TIME_ALLOWED + SLOW_DOWN; // some slack for less noise in this test
+			if (multiThreaded)
+			{
+				res = (long) (res * MULTI_THREAD_SLACK); // larger slack  
+			}
+			return res;
+		}
+		
+		private System.String MaxTimeStr(bool multiThreaded)
+		{
+			System.String s = "( " + "2*resolution +  TIME_ALLOWED + SLOW_DOWN = " + "2*" + TimeLimitingCollector.GetResolution() + " + " + TIME_ALLOWED + " + " + SLOW_DOWN + ")";
+			if (multiThreaded)
+			{
+				s = MULTI_THREAD_SLACK + " * " + s;
+			}
+			return MaxTime(multiThreaded) + " = " + s;
+		}
+		
+		/// <summary> Test timeout behavior when resolution is modified. </summary>
+        [Test]
+		public virtual void  TestModifyResolution()
+		{
+			try
+			{
+				// increase and test
+				uint resolution = 20 * TimeLimitingCollector.DEFAULT_RESOLUTION; //400
+				TimeLimitingCollector.SetResolution(resolution);
+				Assert.AreEqual(resolution, TimeLimitingCollector.GetResolution());
+				DoTestTimeout(false, true);
+				// decrease much and test
+				resolution = 5;
+				TimeLimitingCollector.SetResolution(resolution);
+				Assert.AreEqual(resolution, TimeLimitingCollector.GetResolution());
+				DoTestTimeout(false, true);
+				// return to default and test
+				resolution = TimeLimitingCollector.DEFAULT_RESOLUTION;
+				TimeLimitingCollector.SetResolution(resolution);
+				Assert.AreEqual(resolution, TimeLimitingCollector.GetResolution());
+				DoTestTimeout(false, true);
+			}
+			finally
+			{
+				TimeLimitingCollector.SetResolution(TimeLimitingCollector.DEFAULT_RESOLUTION);
+			}
+		}
+		
+		/// <summary> Test correctness with multiple searching threads.</summary>
+        [Test]
+		public virtual void  TestSearchMultiThreaded()
+		{
+			DoTestMultiThreads(false);
+		}
+		
+		/// <summary> Test correctness with multiple searching threads.</summary>
+        [Test]
+		public virtual void  TestTimeoutMultiThreaded()
+		{
+			DoTestMultiThreads(true);
+		}
+		
+		private void  DoTestMultiThreads(bool withTimeout)
+		{
+			SupportClass.ThreadClass[] threadArray = new SupportClass.ThreadClass[N_THREADS];
+			System.Collections.BitArray success = new System.Collections.BitArray((N_THREADS % 64 == 0?N_THREADS / 64:N_THREADS / 64 + 1) * 64);
+			for (int i = 0; i < threadArray.Length; ++i)
+			{
+				int num = i;
+				threadArray[num] = new AnonymousClassThread(withTimeout, success, num, this);
+			}
+			for (int i = 0; i < threadArray.Length; ++i)
+			{
+				threadArray[i].Start();
+			}
+			for (int i = 0; i < threadArray.Length; ++i)
+			{
+				threadArray[i].Join();
+			}
+			Assert.AreEqual(N_THREADS, SupportClass.BitSetSupport.Cardinality(success), "some threads failed!");
+		}
+		
+		// counting collector that can slow down at collect().
+		private class MyHitCollector:Collector
+		{
+			public MyHitCollector(TestTimeLimitingCollector enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestTimeLimitingCollector enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTimeLimitingCollector enclosingInstance;
+			public TestTimeLimitingCollector Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private System.Collections.BitArray bits = new System.Collections.BitArray(64);
+			private int slowdown = 0;
+			private int lastDocCollected = - 1;
+			private int docBase = 0;
+			
+			/// <summary> amount of time to wait on each collect to simulate a long iteration</summary>
+			public virtual void  SetSlowDown(int milliseconds)
+			{
+				slowdown = milliseconds;
+			}
+			
+			public virtual int HitCount()
+			{
+				return SupportClass.BitSetSupport.Cardinality(bits);
+			}
+			
+			public virtual int GetLastDocCollected()
+			{
+				return lastDocCollected;
+			}
+			
+			public override void  SetScorer(Scorer scorer)
+			{
+				// scorer is not needed
+			}
+			
+			public override void  Collect(int doc)
+			{
+				int docId = doc + docBase;
+				if (slowdown > 0)
+				{
+					try
+					{
+						System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * slowdown));
+					}
+					catch (System.Threading.ThreadInterruptedException ie)
+					{
+						SupportClass.ThreadClass.Current().Interrupt();
+						throw new System.SystemException("", ie);
+					}
+				}
+				System.Diagnostics.Debug.Assert(docId >= 0, "base=" + docBase + " doc=" + doc);
+				bits.Set(docId, true);
+				lastDocCollected = docId;
+			}
+			
+			public override void  SetNextReader(IndexReader reader, int base_Renamed)
+			{
+				docBase = base_Renamed;
+			}
+			
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return false;
+			}
+		}
+	}
+}
\ No newline at end of file
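
For orientation, the pattern the test above exercises - wrapping a collector so that an over-long search aborts with TimeExceededException while keeping whatever was already collected - looks roughly like the sketch below. This is a minimal illustration only, using the 2.9-era Lucene.Net types touched in this commit (TimeLimitingCollector, TopScoreDocCollector, TopDocsCollector); exact overloads may differ in other versions, and 'searcher'/'query' are assumed to be an already-open IndexSearcher and a parsed Query, as in SetUp() above.

    // Collect the top 10 hits, but give the search a limited time budget.
    TopDocsCollector results = TopScoreDocCollector.create(10, true); // in-order collection
    Collector limited = new TimeLimitingCollector(results, 1000);     // allow ~1000 ms
    try
    {
        searcher.Search(query, limited);
    }
    catch (TimeLimitingCollector.TimeExceededException)
    {
        // Timed out: 'results' still holds the hits collected so far
        // (set SetGreedy(true) on the wrapper to also keep the doc being collected at timeout).
    }
    ScoreDoc[] hits = results.TopDocs().scoreDocs;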

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestTopDocsCollector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTopDocsCollector.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTopDocsCollector.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTopDocsCollector.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,235 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using KeywordAnalyzer = Lucene.Net.Analysis.KeywordAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	public class TestTopDocsCollector:LuceneTestCase
+	{
+		
+		private sealed class MyTopsDocCollector:TopDocsCollector
+		{
+			
+			private int idx = 0;
+			private int base_Renamed = 0;
+			
+			public MyTopsDocCollector(int size):base(new HitQueue(size, false))
+			{
+			}
+			
+			public /*protected internal*/ override TopDocs NewTopDocs(ScoreDoc[] results, int start)
+			{
+				if (results == null)
+				{
+					return EMPTY_TOPDOCS;
+				}
+				
+				float maxScore = System.Single.NaN;
+				if (start == 0)
+				{
+					maxScore = results[0].score;
+				}
+				else
+				{
+					for (int i = pq.Size(); i > 1; i--)
+					{
+						pq.Pop();
+					}
+					maxScore = ((ScoreDoc) pq.Pop()).score;
+				}
+				
+				return new TopDocs(totalHits, results, maxScore);
+			}
+			
+			public override void  Collect(int doc)
+			{
+				++totalHits;
+				pq.InsertWithOverflow(new ScoreDoc(doc + base_Renamed, Lucene.Net.Search.TestTopDocsCollector.scores[idx++]));
+			}
+			
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				base_Renamed = docBase;
+			}
+			
+			public override void  SetScorer(Scorer scorer)
+			{
+				// Don't do anything. Assign scores in random
+			}
+			
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
+			}
+		}
+		
+		// Scores array to be used by MyTopDocsCollector. If it is changed, MAX_SCORE
+		// must also change.
+		private static readonly float[] scores = new float[]{0.7767749f, 1.7839992f, 8.9925785f, 7.9608946f, 0.07948637f, 2.6356435f, 7.4950366f, 7.1490803f, 8.108544f, 4.961808f, 2.2423935f, 7.285586f, 4.6699767f, 2.9655676f, 6.953706f, 5.383931f, 6.9916306f, 8.365894f, 7.888485f, 8.723962f, 3.1796896f, 0.39971232f, 1.3077754f, 6.8489285f, 9.17561f, 5.060466f, 7.9793315f, 8.601509f, 4.1858315f, 0.28146625f};
+		
+		private const float MAX_SCORE = 9.17561f;
+		
+		private Directory dir = new RAMDirectory();
+		
+		private TopDocsCollector doSearch(int numResults)
+		{
+			Query q = new MatchAllDocsQuery();
+			IndexSearcher searcher = new IndexSearcher(dir);
+			TopDocsCollector tdc = new MyTopsDocCollector(numResults);
+			searcher.Search(q, tdc);
+			searcher.Close();
+			return tdc;
+		}
+		
+		[Test]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			
+			// populate an index with 30 documents, this should be enough for the test.
+			// The documents have no content - the test uses MatchAllDocsQuery().
+			IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), MaxFieldLength.UNLIMITED);
+			for (int i = 0; i < 30; i++)
+			{
+				writer.AddDocument(new Document());
+			}
+			writer.Close();
+		}
+		
+		[TearDown]
+		public override void  TearDown()
+		{
+			dir.Close();
+			dir = null;
+			base.TearDown();
+		}
+		
+        [Test]
+		public virtual void  TestInvalidArguments()
+		{
+			int numResults = 5;
+			TopDocsCollector tdc = doSearch(numResults);
+			
+			// start < 0
+			Assert.AreEqual(0, tdc.TopDocs(- 1).scoreDocs.Length);
+			
+			// start > pq.size()
+			Assert.AreEqual(0, tdc.TopDocs(numResults + 1).scoreDocs.Length);
+			
+			// start == pq.size()
+			Assert.AreEqual(0, tdc.TopDocs(numResults).scoreDocs.Length);
+			
+			// howMany < 0
+			Assert.AreEqual(0, tdc.TopDocs(0, - 1).scoreDocs.Length);
+			
+			// howMany == 0
+			Assert.AreEqual(0, tdc.TopDocs(0, 0).scoreDocs.Length);
+		}
+		
+        [Test]
+		public virtual void  TestZeroResults()
+		{
+			TopDocsCollector tdc = new MyTopsDocCollector(5);
+			Assert.AreEqual(0, tdc.TopDocs(0, 1).scoreDocs.Length);
+		}
+		
+        [Test]
+		public virtual void  TestFirstResultsPage()
+		{
+			TopDocsCollector tdc = doSearch(15);
+			Assert.AreEqual(10, tdc.TopDocs(0, 10).scoreDocs.Length);
+		}
+		
+        [Test]
+		public virtual void  TestSecondResultsPages()
+		{
+			TopDocsCollector tdc = doSearch(15);
+			// ask for more results than are available
+			Assert.AreEqual(5, tdc.TopDocs(10, 10).scoreDocs.Length);
+			
+			// ask for 5 results (exactly what there should be
+			tdc = doSearch(15);
+			Assert.AreEqual(5, tdc.TopDocs(10, 5).scoreDocs.Length);
+			
+			// ask for less results than there are
+			tdc = doSearch(15);
+			Assert.AreEqual(4, tdc.TopDocs(10, 4).scoreDocs.Length);
+		}
+		
+        [Test]
+		public virtual void  TestGetAllResults()
+		{
+			TopDocsCollector tdc = doSearch(15);
+			Assert.AreEqual(15, tdc.TopDocs().scoreDocs.Length);
+		}
+		
+        [Test]
+		public virtual void  TestGetResultsFromStart()
+		{
+			TopDocsCollector tdc = doSearch(15);
+			// should bring all results
+			Assert.AreEqual(15, tdc.TopDocs(0).scoreDocs.Length);
+			
+			tdc = doSearch(15);
+			// get the last 5 only.
+			Assert.AreEqual(5, tdc.TopDocs(10).scoreDocs.Length);
+		}
+		
+        [Test]
+		public virtual void  TestMaxScore()
+		{
+			// ask for all results
+			TopDocsCollector tdc = doSearch(15);
+			TopDocs td = tdc.TopDocs();
+			Assert.AreEqual(MAX_SCORE, td.GetMaxScore(), 0f);
+			
+			// ask for 5 last results
+			tdc = doSearch(15);
+			td = tdc.TopDocs(10);
+			Assert.AreEqual(MAX_SCORE, td.GetMaxScore(), 0f);
+		}
+		
+		// This does not test the PQ's correctness, but whether topDocs()
+		// implementations return the results in decreasing score order.
+        [Test]
+		public virtual void  TestResultsOrder()
+		{
+			TopDocsCollector tdc = doSearch(15);
+			ScoreDoc[] sd = tdc.TopDocs().scoreDocs;
+			
+			Assert.AreEqual(MAX_SCORE, sd[0].score, 0f);
+			for (int i = 1; i < sd.Length; i++)
+			{
+				Assert.IsTrue(sd[i - 1].score >= sd[i].score);
+			}
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestTopScoreDocCollector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestTopScoreDocCollector.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestTopScoreDocCollector.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestTopScoreDocCollector.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,101 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Occur = Lucene.Net.Search.BooleanClause.Occur;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	public class TestTopScoreDocCollector:LuceneTestCase
+	{
+		
+		public TestTopScoreDocCollector()
+		{
+		}
+		
+		public TestTopScoreDocCollector(System.String name):base(name)
+		{
+		}
+		
+        [Test]
+		public virtual void  TestOutOfOrderCollection()
+		{
+			
+			Directory dir = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, null, MaxFieldLength.UNLIMITED);
+			for (int i = 0; i < 10; i++)
+			{
+				writer.AddDocument(new Document());
+			}
+			writer.Commit();
+			writer.Close();
+			
+			bool[] inOrder = new bool[]{false, true};
+			System.String[] actualTSDCClass = new System.String[]{"OutOfOrderTopScoreDocCollector", "InOrderTopScoreDocCollector"};
+			
+			// Save the original value to set later.
+			bool origVal = BooleanQuery.GetAllowDocsOutOfOrder();
+			
+			BooleanQuery.SetAllowDocsOutOfOrder(true);
+			
+			BooleanQuery bq = new BooleanQuery();
+			// Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
+			// which delegates to BS if there are no mandatory clauses.
+			bq.Add(new MatchAllDocsQuery(), Occur.SHOULD);
+			// Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
+			// the clause instead of BQ.
+			bq.SetMinimumNumberShouldMatch(1);
+			try
+			{
+				
+				IndexSearcher searcher = new IndexSearcher(dir);
+				for (int i = 0; i < inOrder.Length; i++)
+				{
+					TopDocsCollector tdc = TopScoreDocCollector.create(3, inOrder[i]);
+					Assert.AreEqual("Lucene.Net.Search.TopScoreDocCollector$" + actualTSDCClass[i], tdc.GetType().FullName);
+					
+					searcher.Search(new MatchAllDocsQuery(), tdc);
+					
+					ScoreDoc[] sd = tdc.TopDocs().scoreDocs;
+					Assert.AreEqual(3, sd.Length);
+					for (int j = 0; j < sd.Length; j++)
+					{
+						Assert.AreEqual(j, sd[j].doc, "expected doc Id " + j + " found " + sd[j].doc);
+					}
+				}
+			}
+			finally
+			{
+				// Whatever happens, reset BooleanQuery.allowDocsOutOfOrder to the
+				// original value. Don't set it to false in case the implementation in BQ
+				// will change some day.
+				BooleanQuery.SetAllowDocsOutOfOrder(origVal);
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestWildcard.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestWildcard.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestWildcard.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestWildcard.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,14 +19,16 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using Index = Lucene.Net.Documents.Field.Index;
+using Store = Lucene.Net.Documents.Field.Store;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -35,11 +37,11 @@
 	/// <summary> TestWildcard tests the '*' and '?' wildcard characters.
 	/// 
 	/// </summary>
-	/// <version>  $Id: TestWildcard.java 583534 2007-10-10 16:46:35Z mikemccand $
+	/// <version>  $Id: TestWildcard.java 694004 2008-09-10 21:38:52Z mikemccand $
 	/// 
 	/// </version>
-	[TestFixture]
-	public class TestWildcard : LuceneTestCase
+    [TestFixture]
+	public class TestWildcard:LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestEquals()
@@ -144,7 +146,7 @@
 			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			for (int i = 0; i < contents.Length; ++i)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				doc.Add(new Field(field, contents[i], Field.Store.YES, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
@@ -171,7 +173,7 @@
 		{
 			System.String field = "content";
 			bool dbg = false;
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser(field, new WhitespaceAnalyzer());
+			QueryParser qp = new QueryParser(field, new WhitespaceAnalyzer());
 			qp.SetAllowLeadingWildcard(true);
 			System.String[] docs = new System.String[]{"\\ abcdefg1", "\\79 hijklmn1", "\\\\ opqrstu1"};
 			// queries that should find all docs
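
As background for the leading-wildcard hunk above, a minimal sketch (not part of the patch) of the parser setting in use; QueryParser, WhitespaceAnalyzer and SetAllowLeadingWildcard come from the diff, while the helper class and the sample query string are made up:

    // Sketch only: enable leading wildcards on the 2.9-style QueryParser.
    using Lucene.Net.Analysis;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;

    public class LeadingWildcardSketch
    {
        public static Query ParseLeadingWildcard(string text)
        {
            QueryParser qp = new QueryParser("content", new WhitespaceAnalyzer());
            // Without this, a query string starting with '*' or '?' is rejected by the parser.
            qp.SetAllowLeadingWildcard(true);
            return qp.Parse(text);   // e.g. "*defg1"
        }
    }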

Modified: incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/MockRAMDirectory.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMDirectory.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMDirectory.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -17,6 +17,8 @@
 
 using System;
 
+using NUnit.Framework;
+
 namespace Lucene.Net.Store
 {
 	
@@ -27,7 +29,7 @@
 	/// </version>
 	
 	[Serializable]
-	public class MockRAMDirectory : RAMDirectory
+	public class MockRAMDirectory:RAMDirectory
 	{
 		internal long maxSize;
 		
@@ -36,111 +38,121 @@
 		internal double randomIOExceptionRate;
 		internal System.Random randomState;
 		internal bool noDeleteOpenFile = true;
-        internal bool preventDoubleWrite = true;
-        private System.Collections.Generic.IDictionary<string, string> unSyncedFiles;
-        private System.Collections.Generic.IDictionary<string, string> createdFiles;
-        internal volatile bool crashed;
-
+		internal bool preventDoubleWrite = true;
+		private System.Collections.Hashtable unSyncedFiles;
+		private System.Collections.Hashtable createdFiles;
+		internal volatile bool crashed;
+		
 		// NOTE: we cannot initialize the Map here due to the
 		// order in which our constructor actually does this
 		// member initialization vs when it calls super.  It seems
 		// like super is called, then our members are initialized:
 		internal System.Collections.IDictionary openFiles;
-
-        private void Init()
-        {
-            if (openFiles == null)
-                openFiles = new System.Collections.Hashtable();
-            if (createdFiles == null)
-                createdFiles = new System.Collections.Generic.Dictionary<string, string>();
-            if (unSyncedFiles == null)
-                unSyncedFiles = new System.Collections.Generic.Dictionary<string, string>();
-
-        }
-
-		public MockRAMDirectory() : base()
+		
+		private void  Init()
 		{
-            Init();
+			lock (this)
+			{
+				if (openFiles == null)
+				{
+					openFiles = new System.Collections.Hashtable();
+				}
+				if (createdFiles == null)
+				{
+					createdFiles = new System.Collections.Hashtable();
+				}
+				if (unSyncedFiles == null)
+				{
+					unSyncedFiles = new System.Collections.Hashtable();
+				}
+			}
 		}
-		public MockRAMDirectory(System.String dir) : base(dir)
+		
+		public MockRAMDirectory():base()
 		{
-            Init();
-        }
-		public MockRAMDirectory(Directory dir) : base(dir)
+			Init();
+		}
+		public MockRAMDirectory(System.String dir):base(dir)
 		{
-            Init();
-        }
-		public MockRAMDirectory(System.IO.FileInfo dir) : base(dir)
+			Init();
+		}
+		public MockRAMDirectory(Directory dir):base(dir)
 		{
-            Init();
-        }
-
-        /// <summary>
-        /// If set to true, we throw an IOException if the same file is opened by createOutput, ever.
-        /// </summary>
-        /// <param name="value"></param>
-        public void SetPreventDoubleWrite(bool value)
-        {
-            preventDoubleWrite = value;
-        }
-
-        override public void Sync(string name)
-        {
-            lock (this)
-            {
-                MaybeThrowDeterministicException();
-                if (crashed)
-                    throw new System.IO.IOException("cannot sync after crash");
-                if (unSyncedFiles.ContainsKey(name))
-                    unSyncedFiles.Remove(name);
-            }
-        }
-
-        /// <summary>
-        /// Simulates a crash of OS or machine by overwriting unsynced files.
-        /// </summary>
-        public void Crash()
-        {
-            lock (this)
-            {
-                crashed = true;
-                openFiles = new System.Collections.Hashtable();
-            }
-            System.Collections.Generic.IEnumerator<string> it = unSyncedFiles.Keys.GetEnumerator();
-            unSyncedFiles = new System.Collections.Generic.Dictionary<string, string>();
-            int count = 0;
-            while (it.MoveNext())
-            {
-                string name = it.Current;
-                RAMFile file = (RAMFile)fileMap_ForNUnitTest[name];
-                if (count % 3 == 0)
-                {
-                    DeleteFile(name, true);
-                }
-                else if (count % 3 == 1)
-                {
-                    // Zero out file entirely
-                    int numBuffers = file.NumBuffers_ForNUnitTest();
-                    for (int i = 0; i < numBuffers; i++)
-                    {
-                        byte[] buffer = file.GetBuffer_ForNUnitTest(i);
-                        SupportClass.CollectionsSupport.ArrayFill(buffer, (byte)0);
-                    }
-                }
-                else if (count % 3 == 2)
-                {
-                    // truncate the file:
-                    file.SetLength_ForNUnitTest(file.GetLength_ForNUnitTest() / 2);
-                }
-                count++;
-            }
-        }
-
-        public void ClearCrash()
-        {
-            lock (this) { crashed = false; }
-        }
-
+			Init();
+		}
+		public MockRAMDirectory(System.IO.FileInfo dir):base(dir)
+		{
+			Init();
+		}
+		
+		/// <summary>If set to true, we throw an IOException if the same
+		/// file is opened by createOutput more than once.
+		/// </summary>
+		public virtual void  SetPreventDoubleWrite(bool value_Renamed)
+		{
+			preventDoubleWrite = value_Renamed;
+		}
+		
+		public override void  Sync(System.String name)
+		{
+			lock (this)
+			{
+				MaybeThrowDeterministicException();
+				if (crashed)
+					throw new System.IO.IOException("cannot sync after crash");
+				if (unSyncedFiles.Contains(name))
+					unSyncedFiles.Remove(name);
+			}
+		}
+		
+		/// <summary>Simulates a crash of OS or machine by overwriting
+		/// unsynced files. 
+		/// </summary>
+		public virtual void  Crash()
+		{
+			lock (this)
+			{
+				crashed = true;
+				openFiles = new System.Collections.Hashtable();
+				System.Collections.IEnumerator it = unSyncedFiles.GetEnumerator();
+				unSyncedFiles = new System.Collections.Hashtable();
+				int count = 0;
+				while (it.MoveNext())
+				{
+					System.String name = (System.String) it.Current;
+					RAMFile file = (RAMFile) fileMap_ForNUnit[name];
+					if (count % 3 == 0)
+					{
+						DeleteFile(name, true);
+					}
+					else if (count % 3 == 1)
+					{
+						// Zero out file entirely
+						int numBuffers = file.NumBuffers();
+						for (int i = 0; i < numBuffers; i++)
+						{
+							byte[] buffer = file.GetBuffer(i);
+							for (int j = 0; j < buffer.Length; j++) buffer[j] = (byte) 0;
+						}
+					}
+					else if (count % 3 == 2)
+					{
+						// Truncate the file:
+						file.SetLength(file.GetLength() / 2);
+					}
+					count++;
+				}
+			}
+		}
+		
+		public virtual void  ClearCrash()
+		{
+			lock (this)
+			{
+				crashed = false;
+			}
+		}
+		
 		public virtual void  SetMaxSizeInBytes(long maxSize)
 		{
 			this.maxSize = maxSize;
@@ -206,86 +218,76 @@
 		{
 			lock (this)
 			{
-                DeleteFile(name, false);
-            }
-        }
-
-        private void DeleteFile(string name, bool forced)
-        {
-            lock (this)
-            {
-                MaybeThrowDeterministicException();
-
-                if (crashed && !forced)
-                    throw new System.IO.IOException("cannot delete after crash");
-
-                if (unSyncedFiles.ContainsKey(name))
-                    unSyncedFiles.Remove(name);
-
-                if (!forced)
-                {
-                    lock (openFiles.SyncRoot)
-                    {
-                        if (noDeleteOpenFile && openFiles.Contains(name))
-                        {
-                            throw new System.IO.IOException("MockRAMDirectory: file \"" + name + "\" is still open: cannot delete");
-                        }
-                    }
-                }
+				DeleteFile(name, false);
+			}
+		}
+		
+		private void  DeleteFile(System.String name, bool forced)
+		{
+			lock (this)
+			{
+				
+				MaybeThrowDeterministicException();
+				
+				if (crashed && !forced)
+					throw new System.IO.IOException("cannot delete after crash");
+				
+				if (unSyncedFiles.Contains(name))
+					unSyncedFiles.Remove(name);
+				if (!forced)
+				{
+					if (noDeleteOpenFile && openFiles.Contains(name))
+					{
+						throw new System.IO.IOException("MockRAMDirectory: file \"" + name + "\" is still open: cannot delete");
+					}
+				}
 				base.DeleteFile(name);
 			}
 		}
 		
 		public override IndexOutput CreateOutput(System.String name)
 		{
-            if (crashed)
-                throw new System.IO.IOException("cannot create output after crash");
-            Init();
-            lock (openFiles.SyncRoot)
-            {
-                if (preventDoubleWrite && createdFiles.ContainsKey(name) && !name.Equals("segments.gen"))
-                    throw new System.IO.IOException("file \"" + name + "\" is still open: cannot overwrite");
-                if (noDeleteOpenFile && openFiles.Contains(name))
-                    throw new System.IO.IOException("MockRAMDirectory: file \"" + name + "\" is still open: cannot overwrite");
-            }
-			RAMFile file = new RAMFile(this);
-			lock (this)
-			{
-                if (crashed)
-                    throw new System.IO.IOException("cannot create output after crash");
-                unSyncedFiles[name] = name;
-                createdFiles[name] = name;
-                RAMFile existing = (RAMFile)fileMap_ForNUnitTest[name];
+			lock (this)
+			{
+				if (crashed)
+					throw new System.IO.IOException("cannot createOutput after crash");
+				Init();
+				if (preventDoubleWrite && createdFiles.Contains(name) && !name.Equals("segments.gen"))
+					throw new System.IO.IOException("file \"" + name + "\" was already written to");
+				if (noDeleteOpenFile && openFiles.Contains(name))
+					throw new System.IO.IOException("MockRAMDirectory: file \"" + name + "\" is still open: cannot overwrite");
+				RAMFile file = new RAMFile(this);
+				if (crashed)
+					throw new System.IO.IOException("cannot createOutput after crash");
+				SupportClass.CollectionsHelper.AddIfNotContains(unSyncedFiles, name);
+				SupportClass.CollectionsHelper.AddIfNotContains(createdFiles, name);
+				RAMFile existing = (RAMFile) fileMap_ForNUnit[name];
 				// Enforce write once:
-				if (existing != null && !name.Equals("segments.gen"))
+				if (existing != null && !name.Equals("segments.gen") && preventDoubleWrite)
 					throw new System.IO.IOException("file " + name + " already exists");
 				else
 				{
 					if (existing != null)
 					{
-						sizeInBytes_ForNUnitTest -= existing.sizeInBytes_ForNUnitTest;
-						existing.directory_ForNUnitTest = null;
+						sizeInBytes_ForNUnitTest -= existing.sizeInBytes_ForNUnit;
+						existing.directory_ForNUnit = null;
 					}
-
-					fileMap_ForNUnitTest[name] = file;
+					
+					fileMap_ForNUnit[name] = file;
 				}
+				
+				return new MockRAMOutputStream(this, file, name);
 			}
-			
-			return new MockRAMOutputStream(this, file);
 		}
 		
 		public override IndexInput OpenInput(System.String name)
 		{
-			RAMFile file;
 			lock (this)
 			{
-				file = (RAMFile)fileMap_ForNUnitTest[name];
-			}
-			if (file == null)
-				throw new System.IO.FileNotFoundException(name);
-			else
-			{
-				lock (openFiles.SyncRoot)
+				RAMFile file = (RAMFile) fileMap_ForNUnit[name];
+				if (file == null)
+					throw new System.IO.FileNotFoundException(name);
+				else
 				{
 					if (openFiles.Contains(name))
 					{
@@ -298,8 +300,8 @@
 						openFiles[name] = 1;
 					}
 				}
+				return new MockRAMInputStream(this, name, file);
 			}
-			return new MockRAMInputStream(this, name, file);
 		}
 		
 		/// <summary>Provided for testing purposes.  Use sizeInBytes() instead. </summary>
@@ -308,10 +310,10 @@
 			lock (this)
 			{
 				long size = 0;
-				System.Collections.IEnumerator it = fileMap_ForNUnitTest.Values.GetEnumerator();
+				System.Collections.IEnumerator it = fileMap_ForNUnit.Values.GetEnumerator();
 				while (it.MoveNext())
 				{
-					size += ((RAMFile)it.Current).GetSizeInBytes_ForNUnitTest();
+					size += ((RAMFile) it.Current).GetSizeInBytes();
 				}
 				return size;
 			}
@@ -328,10 +330,10 @@
 			lock (this)
 			{
 				long size = 0;
-				System.Collections.IEnumerator it = fileMap_ForNUnitTest.Values.GetEnumerator();
+				System.Collections.IEnumerator it = fileMap_ForNUnit.Values.GetEnumerator();
 				while (it.MoveNext())
 				{
-					size += ((RAMFile)it.Current).length_ForNUnitTest;
+					size += ((RAMFile) it.Current).length_ForNUnit;
 				}
 				return size;
 			}
@@ -339,17 +341,17 @@
 		
 		public override void  Close()
 		{
-			if (openFiles == null)
-			{
-				openFiles = new System.Collections.Hashtable();
-			}
-			lock (openFiles.SyncRoot)
+			lock (this)
 			{
+				if (openFiles == null)
+				{
+					openFiles = new System.Collections.Hashtable();
+				}
 				if (noDeleteOpenFile && openFiles.Count > 0)
 				{
 					// RuntimeException instead of IOException because
 					// super() does not throw IOException currently:
-					throw new System.SystemException("MockRAMDirectory: cannot close: there are still open files: " + openFiles.ToString());
+					throw new System.SystemException("MockRAMDirectory: cannot close: there are still open files: " + SupportClass.CollectionsHelper.CollectionToString(openFiles));
 				}
 			}
 		}
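
To make the crash simulation in MockRAMDirectory concrete, a rough usage sketch follows (not part of this commit; it only combines calls that appear in this diff, and the helper class and file name are made up):

    // Sketch only: index a document, simulate an OS/machine crash, then recover.
    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public class CrashSimulationSketch
    {
        public static void Run()
        {
            MockRAMDirectory dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            Document doc = new Document();
            doc.Add(new Field("content", "aaa bbb", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Close();

            dir.Crash();                        // unsynced files get deleted, zeroed, or truncated
            try
            {
                dir.CreateOutput("extra.bin");  // refused: "cannot createOutput after crash"
            }
            catch (System.IO.IOException)
            {
                // expected while the directory is in the crashed state
            }
            dir.ClearCrash();                   // directory accepts writes again
        }
    }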

Modified: incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMInputStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/MockRAMInputStream.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMInputStream.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMInputStream.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -17,6 +17,8 @@
 
 using System;
 
+using NUnit.Framework;
+
 namespace Lucene.Net.Store
 {
 	
@@ -24,7 +26,7 @@
 	/// keeps track of when it's been closed.
 	/// </summary>
 	
-	public class MockRAMInputStream : RAMInputStream, System.ICloneable
+	public class MockRAMInputStream:RAMInputStream, System.ICloneable
 	{
 		private MockRAMDirectory dir;
 		private System.String name;
@@ -46,22 +48,22 @@
 			// all clones get closed:
 			if (!isClone)
 			{
-				lock (dir.openFiles.SyncRoot)
+				lock (dir)
 				{
-                    // could be null when MockRAMDIrectory.Crash() was called
-                    if (dir.openFiles[name] != null)
-                    {
-                        System.Int32 v = (System.Int32)dir.openFiles[name];
-                        if (v == 1)
-                        {
-                            dir.openFiles.Remove(name);
-                        }
-                        else
-                        {
-                            v = (System.Int32)(v - 1);
-                            dir.openFiles[name] = v;
-                        }
-                    }
+					// Could be null when MockRAMDirectory.Crash() was called
+					if (dir.openFiles[name] != null)
+					{
+						System.Int32 v = (System.Int32) dir.openFiles[name];
+						if (v == 1)
+						{
+							dir.openFiles.Remove(name);
+						}
+						else
+						{
+							v = (System.Int32) (v - 1);
+							dir.openFiles[name] = v;
+						}
+					}
 				}
 			}
 		}
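
For context on the open-file bookkeeping that the Close() override above participates in, here is a small sketch (not part of the patch; all calls are taken from this diff, while the helper class and file name are made up):

    // Sketch only: deleting a file that still has an open MockRAMInputStream fails.
    using Lucene.Net.Store;

    public class OpenFileTrackingSketch
    {
        public static void Run()
        {
            MockRAMDirectory dir = new MockRAMDirectory();
            IndexOutput output = dir.CreateOutput("test.bin");
            output.WriteByte((byte) 42);
            output.Close();

            IndexInput input = dir.OpenInput("test.bin");  // openFiles["test.bin"] == 1
            try
            {
                dir.DeleteFile("test.bin");                // throws: file is still open
            }
            catch (System.IO.IOException)
            {
                // expected: noDeleteOpenFile defaults to true
            }
            input.Close();                                 // count drops back to zero
            dir.DeleteFile("test.bin");                    // now succeeds
        }
    }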

Modified: incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMOutputStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/MockRAMOutputStream.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMOutputStream.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/MockRAMOutputStream.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -17,6 +17,8 @@
 
 using System;
 
+using NUnit.Framework;
+
 namespace Lucene.Net.Store
 {
 	
@@ -26,17 +28,19 @@
 	/// IOExceptions.
 	/// </summary>
 	
-	public class MockRAMOutputStream : RAMOutputStream
+	public class MockRAMOutputStream:RAMOutputStream
 	{
 		private MockRAMDirectory dir;
 		private bool first = true;
+		private System.String name;
 		
 		internal byte[] singleByte = new byte[1];
 		
 		/// <summary>Construct an empty output buffer. </summary>
-		public MockRAMOutputStream(MockRAMDirectory dir, RAMFile f) : base(f)
+		public MockRAMOutputStream(MockRAMDirectory dir, RAMFile f, System.String name):base(f)
 		{
 			this.dir = dir;
+			this.name = name;
 		}
 		
 		public override void  Close()
@@ -69,11 +73,11 @@
 			long freeSpace = dir.maxSize - dir.SizeInBytes();
 			long realUsage = 0;
 			
-
-            // If MockRAMDirectory crashed since we were opened, then don't write anything
-            if (dir.crashed)
-                throw new System.IO.IOException("MockRAMDirectory was crashed");
-
+			// If MockRAMDir crashed since we were opened, then
+			// don't write anything:
+			if (dir.crashed)
+				throw new System.IO.IOException("MockRAMDirectory was crashed; cannot write to " + name);
+			
 			// Enforce disk full:
 			if (dir.maxSize != 0 && freeSpace <= len)
 			{
@@ -94,7 +98,7 @@
 				{
 					dir.maxUsedSize = realUsage;
 				}
-				throw new System.IO.IOException("fake disk full at " + dir.GetRecomputedActualSizeInBytes() + " bytes");
+				throw new System.IO.IOException("fake disk full at " + dir.GetRecomputedActualSizeInBytes() + " bytes when writing " + name);
 			}
 			else
 			{
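
A rough sketch (not part of this commit) of how the fake-disk-full path above is usually triggered, using MockRAMDirectory.SetMaxSizeInBytes from the file shown earlier in this diff; the 1 KB limit, document contents, and helper class are made-up values:

    // Sketch only: cap the directory size so MockRAMOutputStream reports "fake disk full".
    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public class FakeDiskFullSketch
    {
        public static void Run()
        {
            MockRAMDirectory dir = new MockRAMDirectory();
            dir.SetMaxSizeInBytes(1024);   // pretend the "disk" only holds 1 KB
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            try
            {
                for (int i = 0; i < 1000; i++)
                {
                    Document doc = new Document();
                    doc.Add(new Field("content", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.ANALYZED));
                    writer.AddDocument(doc);   // eventually throws the fake-disk-full IOException
                }
            }
            catch (System.IO.IOException)
            {
                // expected once writes exceed the configured maximum size
            }
        }
    }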

Modified: incubator/lucene.net/trunk/C#/src/Test/Store/TestBufferedIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Store/TestBufferedIndexInput.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Store/TestBufferedIndexInput.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Store/TestBufferedIndexInput.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -25,6 +25,9 @@
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using NIOFSIndexInput = Lucene.Net.Store.NIOFSDirectory.NIOFSIndexInput;
+using SimpleFSIndexInput = Lucene.Net.Store.SimpleFSDirectory.SimpleFSIndexInput;
+using ArrayUtil = Lucene.Net.Util.ArrayUtil;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using TermQuery = Lucene.Net.Search.TermQuery;
@@ -33,9 +36,34 @@
 
 namespace Lucene.Net.Store
 {
+	
 	[TestFixture]
-	public class TestBufferedIndexInput : LuceneTestCase
+	public class TestBufferedIndexInput:LuceneTestCase
 	{
+		
+		private static void  WriteBytes(System.IO.FileInfo aFile, long size)
+		{
+			System.IO.Stream stream = null;
+			try
+			{
+				stream = new System.IO.FileStream(aFile.FullName, System.IO.FileMode.Create);
+				for (int i = 0; i < size; i++)
+				{
+					stream.WriteByte((byte) Byten(i));
+				}
+				stream.Flush();
+			}
+			finally
+			{
+				if (stream != null)
+				{
+					stream.Close();
+				}
+			}
+		}
+		
+		private const long TEST_FILE_LENGTH = 1024 * 1024;
+		
 		// Call readByte() repeatedly, past the buffer boundary, and see that it
 		// is working as expected.
 		// Our input comes from a dynamically generated/ "file" - see
@@ -57,42 +85,112 @@
 		[Test]
 		public virtual void  TestReadBytes()
 		{
+			System.Random r = NewRandom();
+			
 			MyBufferedIndexInput input = new MyBufferedIndexInput();
+			RunReadBytes(input, BufferedIndexInput.BUFFER_SIZE, r);
+			
+			// This exercises the workaround code for LUCENE-1566, where readBytesInternal
+			// works around a JVM bug that incorrectly raises an OOM error
+			// when a large byte buffer is passed to a file read.
+			// NOTE: this only tests the chunked reads, NOT whether the bug is triggered.
+			//final int tmpFileSize = 1024 * 1024 * 5;
+			int inputBufferSize = 128;
+			
+			System.String tempDirectory = System.IO.Path.GetTempPath();
+
+			System.IO.FileInfo tmpInputFile = new System.IO.FileInfo(System.IO.Path.Combine(tempDirectory, "IndexInput.tmpFile"));
+			System.IO.File.Delete(tmpInputFile.FullName);
+			WriteBytes(tmpInputFile, TEST_FILE_LENGTH);
+			
+			// run test with chunk size of 10 bytes
+			RunReadBytesAndClose(new SimpleFSIndexInput(tmpInputFile, inputBufferSize, 10), inputBufferSize, r);
+			// run test with chunk size of 100 MB - default
+			RunReadBytesAndClose(new SimpleFSIndexInput(tmpInputFile, inputBufferSize), inputBufferSize, r);
+			// run test with chunk size of 10 bytes
+			RunReadBytesAndClose(new NIOFSIndexInput(tmpInputFile, inputBufferSize, 10), inputBufferSize, r);
+			// run test with chunk size of 100 MB - default
+			RunReadBytesAndClose(new NIOFSIndexInput(tmpInputFile, inputBufferSize), inputBufferSize, r);
+		}
+		
+		private void  RunReadBytesAndClose(IndexInput input, int bufferSize, System.Random r)
+		{
+			try
+			{
+				RunReadBytes(input, bufferSize, r);
+			}
+			finally
+			{
+				input.Close();
+			}
+		}
+		
+		private void  RunReadBytes(IndexInput input, int bufferSize, System.Random r)
+		{
+			
 			int pos = 0;
 			// gradually increasing size:
-			for (int size = 1; size < BufferedIndexInput.BUFFER_SIZE * 10; size = size + size / 200 + 1)
+			for (int size = 1; size < bufferSize * 10; size = size + size / 200 + 1)
 			{
 				CheckReadBytes(input, size, pos);
 				pos += size;
+				if (pos >= TEST_FILE_LENGTH)
+				{
+					// wrap
+					pos = 0;
+					input.Seek(0L);
+				}
 			}
 			// wildly fluctuating size:
 			for (long i = 0; i < 1000; i++)
 			{
-				// The following function generates a fluctuating (but repeatable)
-				// size, sometimes small (<100) but sometimes large (>10000)
-				int size1 = (int) (i % 7 + 7 * (i % 5) + 7 * 5 * (i % 3) + 5 * 5 * 3 * (i % 2));
-				int size2 = (int) (i % 11 + 11 * (i % 7) + 11 * 7 * (i % 5) + 11 * 7 * 5 * (i % 3) + 11 * 7 * 5 * 3 * (i % 2));
-				int size = (i % 3 == 0)?size2 * 10:size1;
-				CheckReadBytes(input, size, pos);
-				pos += size;
+				int size = r.Next(10000);
+				CheckReadBytes(input, 1 + size, pos);
+				pos += 1 + size;
+				if (pos >= TEST_FILE_LENGTH)
+				{
+					// wrap
+					pos = 0;
+					input.Seek(0L);
+				}
 			}
 			// constant small size (7 bytes):
-			for (int i = 0; i < BufferedIndexInput.BUFFER_SIZE; i++)
+			for (int i = 0; i < bufferSize; i++)
 			{
 				CheckReadBytes(input, 7, pos);
 				pos += 7;
+				if (pos >= TEST_FILE_LENGTH)
+				{
+					// wrap
+					pos = 0;
+					input.Seek(0L);
+				}
 			}
 		}
-		private void  CheckReadBytes(BufferedIndexInput input, int size, int pos)
+		
+		private byte[] buffer = new byte[10];
+		
+		private void  CheckReadBytes(IndexInput input, int size, int pos)
 		{
 			// Just to see that "offset" is treated properly in readBytes(), we
 			// add an arbitrary offset at the beginning of the array
 			int offset = size % 10; // arbitrary
-			byte[] b = new byte[offset + size];
-			input.ReadBytes(b, offset, size);
+			buffer = ArrayUtil.Grow(buffer, offset + size);
+			Assert.AreEqual(pos, input.GetFilePointer());
+			long left = TEST_FILE_LENGTH - input.GetFilePointer();
+			if (left <= 0)
+			{
+				return ;
+			}
+			else if (left < size)
+			{
+				size = (int) left;
+			}
+			input.ReadBytes(buffer, offset, size);
+			Assert.AreEqual(pos + size, input.GetFilePointer());
 			for (int i = 0; i < size; i++)
 			{
-				Assert.AreEqual(b[offset + i], Byten(pos + i));
+				Assert.AreEqual(Byten(pos + i), buffer[offset + i], "pos=" + i + " filepos=" + (pos + i));
 			}
 		}
 		
@@ -116,7 +214,7 @@
 				CheckReadBytes(input, 11, pos);
 				Assert.Fail("Block read past end of file");
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				/* success */
 			}
@@ -126,7 +224,7 @@
 				CheckReadBytes(input, 50, pos);
 				Assert.Fail("Block read past end of file");
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				/* success */
 			}
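
Relating back to the LUCENE-1566 comment in the TestReadBytes hunk above, a small sketch (not part of the patch) of reading through SimpleFSIndexInput with an explicit chunk size; the three-argument constructor is taken from the test code in this diff, and the file is assumed to already exist and be large enough (for example, written with WriteBytes above):

    // Sketch only: a tiny chunk size forces ReadBytes to issue many small OS reads,
    // which is the behavior the LUCENE-1566 workaround relies on.
    using Lucene.Net.Store;
    using SimpleFSIndexInput = Lucene.Net.Store.SimpleFSDirectory.SimpleFSIndexInput;

    public class ChunkedReadSketch
    {
        public static void Run(System.IO.FileInfo existingFile)
        {
            IndexInput input = new SimpleFSIndexInput(existingFile, 128 /* buffer size */, 10 /* chunk size */);
            try
            {
                byte[] b = new byte[4096];
                input.ReadBytes(b, 0, b.Length);   // internally performed in 10-byte chunks
            }
            finally
            {
                input.Close();
            }
        }
    }
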
@@ -136,20 +234,19 @@
 				CheckReadBytes(input, 100000, pos);
 				Assert.Fail("Block read past end of file");
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				/* success */
 			}
 		}
 		
-		// byten emulates a file - Byten(n) returns the n'th byte in that file.
+		// Byten emulates a file - Byten(n) returns the n'th byte in that file.
 		// MyBufferedIndexInput reads this "file".
 		private static byte Byten(long n)
 		{
 			return (byte) (n * n % 256);
 		}
-
-		private class MyBufferedIndexInput : BufferedIndexInput
+		private class MyBufferedIndexInput:BufferedIndexInput
 		{
 			private long pos;
 			private long len;
@@ -158,17 +255,16 @@
 				this.len = len;
 				this.pos = 0;
 			}
-			public MyBufferedIndexInput() : this(System.Int64.MaxValue)
+			public MyBufferedIndexInput():this(System.Int64.MaxValue)
 			{
 			}
-
-			protected override void  ReadInternal(byte[] b, int offset, int length)
+			public override void  ReadInternal(byte[] b, int offset, int length)
 			{
 				for (int i = offset; i < offset + length; i++)
 					b[i] = Lucene.Net.Store.TestBufferedIndexInput.Byten(pos++);
 			}
 			
-			protected override void  SeekInternal(long pos)
+			public override void  SeekInternal(long pos)
 			{
 				this.pos = pos;
 			}
@@ -187,7 +283,7 @@
 		public virtual void  TestSetBufferSize()
 		{
 			System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testSetBufferSize"));
-			MockFSDirectory dir = new MockFSDirectory(indexDir);
+			MockFSDirectory dir = new MockFSDirectory(indexDir, NewRandom());
 			try
 			{
 				IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
@@ -210,21 +306,21 @@
 				Assert.AreEqual(37, reader.DocFreq(ccc));
 				reader.DeleteDocument(0);
 				Assert.AreEqual(37, reader.DocFreq(aaa));
-				dir.TweakBufferSizes();
+				dir.tweakBufferSizes();
 				reader.DeleteDocument(4);
 				Assert.AreEqual(reader.DocFreq(bbb), 37);
-				dir.TweakBufferSizes();
+				dir.tweakBufferSizes();
 				
 				IndexSearcher searcher = new IndexSearcher(reader);
 				ScoreDoc[] hits = searcher.Search(new TermQuery(bbb), null, 1000).scoreDocs;
-				dir.TweakBufferSizes();
+				dir.tweakBufferSizes();
 				Assert.AreEqual(35, hits.Length);
-				dir.TweakBufferSizes();
+				dir.tweakBufferSizes();
 				hits = searcher.Search(new TermQuery(new Term("id", "33")), null, 1000).scoreDocs;
-				dir.TweakBufferSizes();
+				dir.tweakBufferSizes();
 				Assert.AreEqual(1, hits.Length);
 				hits = searcher.Search(new TermQuery(aaa), null, 1000).scoreDocs;
-				dir.TweakBufferSizes();
+				dir.tweakBufferSizes();
 				Assert.AreEqual(35, hits.Length);
 				searcher.Close();
 				reader.Close();
@@ -235,19 +331,20 @@
 			}
 		}
 		
-		private class MockFSDirectory : Directory
+		private class MockFSDirectory:Directory
 		{
 			
 			internal System.Collections.IList allIndexInputs = new System.Collections.ArrayList();
-
-            internal System.Random rand = new System.Random(788);
+			
+			internal System.Random rand;
 			
 			private Directory dir;
 			
-			public MockFSDirectory(System.IO.FileInfo path)
+			public MockFSDirectory(System.IO.FileInfo path, System.Random rand)
 			{
+				this.rand = rand;
 				lockFactory = new NoLockFactory();
-				dir = FSDirectory.GetDirectory(path);
+				dir = new SimpleFSDirectory(path, null);
 			}
 			
 			public override IndexInput OpenInput(System.String name)
@@ -255,7 +352,7 @@
 				return OpenInput(name, BufferedIndexInput.BUFFER_SIZE);
 			}
 			
-			public virtual void  TweakBufferSizes()
+			public virtual void  tweakBufferSizes()
 			{
 				System.Collections.IEnumerator it = allIndexInputs.GetEnumerator();
 				//int count = 0;
@@ -308,6 +405,10 @@
 			{
 				return dir.List();
 			}
+			public override System.String[] ListAll()
+			{
+				return dir.ListAll();
+			}
 			
 			public override long FileLength(System.String name)
 			{