Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC

svn commit: r832486 [15/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactionRollback.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTransactionRollback.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactionRollback.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactionRollback.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,287 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Index
+{
+	
+	/// <summary> Test class to illustrate using IndexDeletionPolicy to provide multi-level rollback capability.
+	/// This test case creates an index of records 1 to 100, introducing a commit point every 10 records.
+	/// 
+	/// A "keep all" deletion policy is used to ensure we keep all commit points for testing purposes.
+	/// </summary>
+	
+    [TestFixture]
+	public class TestTransactionRollback:LuceneTestCase
+	{
+		
+		private const System.String FIELD_RECORD_ID = "record_id";
+		private Directory dir;
+		
+		
+		//Rolls back index to a chosen ID
+		private void  RollBackLast(int id)
+		{
+			
+			// System.out.println("Attempting to rollback to "+id);
+			System.String ids = "-" + id;
+			IndexCommit last = null;
+			System.Collections.ICollection commits = IndexReader.ListCommits(dir);
+			for (System.Collections.IEnumerator iterator = commits.GetEnumerator(); iterator.MoveNext(); )
+			{
+				IndexCommit commit = (IndexCommit) iterator.Current;
+				System.Collections.IDictionary ud = commit.GetUserData();
+				if (ud.Count > 0)
+					if (((System.String) ud["index"]).EndsWith(ids))
+						last = commit;
+			}
+			
+			if (last == null)
+				throw new System.SystemException("Couldn't find commit point " + id);
+			
+			IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), new RollbackDeletionPolicy(this, id), MaxFieldLength.UNLIMITED, last);
+			System.Collections.IDictionary data = new System.Collections.Hashtable();
+			data["index"] = "Rolled back to 1-" + id;
+			w.Commit(data);
+			w.Close();
+		}
+		
+        [Test]
+		public virtual void  TestRepeatedRollBacks()
+		{
+			
+			int expectedLastRecordId = 100;
+			while (expectedLastRecordId > 10)
+			{
+				expectedLastRecordId -= 10;
+				RollBackLast(expectedLastRecordId);
+				
+				System.Collections.BitArray expecteds = new System.Collections.BitArray((100 % 64 == 0?100 / 64:100 / 64 + 1) * 64);
+                for (int i = 1; i < (expectedLastRecordId + 1); i++) { expecteds.Set(i, true); }
+				CheckExpecteds(expecteds);
+			}
+		}
+		
+		private void  CheckExpecteds(System.Collections.BitArray expecteds)
+		{
+			IndexReader r = IndexReader.Open(dir);
+			
+			//Perhaps not the most efficient approach but meets our needs here.
+			for (int i = 0; i < r.MaxDoc(); i++)
+			{
+				if (!r.IsDeleted(i))
+				{
+					System.String sval = r.Document(i).Get(FIELD_RECORD_ID);
+					if (sval != null)
+					{
+						int val = System.Int32.Parse(sval);
+						Assert.IsTrue(expecteds.Get(val), "Did not expect document #" + val);
+						expecteds.Set(val, false); // clear the bit; every bit must be cleared by the end
+					}
+				}
+			}
+			r.Close();
+			Assert.AreEqual(0, SupportClass.BitSetSupport.Cardinality(expecteds), "Should have 0 docs remaining ");
+		}
+		
+		/*
+		private void showAvailableCommitPoints() throws Exception {
+		Collection commits = IndexReader.listCommits(dir);
+		for (Iterator iterator = commits.iterator(); iterator.hasNext();) {
+		IndexCommit comm = (IndexCommit) iterator.next();
+		System.out.print("\t Available commit point:["+comm.getUserData()+"] files=");
+		Collection files = comm.getFileNames();
+		for (Iterator iterator2 = files.iterator(); iterator2.hasNext();) {
+		String filename = (String) iterator2.next();
+		System.out.print(filename+", ");				
+		}
+		System.out.println();
+		}
+		}
+		*/
+		
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			dir = new MockRAMDirectory();
+			
+			//Build an index of records 1 to 100, committing after each batch of 10
+			IndexDeletionPolicy sdp = new KeepAllDeletionPolicy(this);
+			IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), sdp, MaxFieldLength.UNLIMITED);
+			for (int currentRecordId = 1; currentRecordId <= 100; currentRecordId++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field(FIELD_RECORD_ID, "" + currentRecordId, Field.Store.YES, Field.Index.ANALYZED));
+				w.AddDocument(doc);
+				
+				if (currentRecordId % 10 == 0)
+				{
+					System.Collections.IDictionary data = new System.Collections.Hashtable();
+					data["index"] = "records 1-" + currentRecordId;
+					w.Commit(data);
+				}
+			}
+			
+			w.Close();
+		}
+		
+		// Rolls back to previous commit point
+		internal class RollbackDeletionPolicy : IndexDeletionPolicy
+		{
+			private void  InitBlock(TestTransactionRollback enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTransactionRollback enclosingInstance;
+			public TestTransactionRollback Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private int rollbackPoint;
+			
+			public RollbackDeletionPolicy(TestTransactionRollback enclosingInstance, int rollbackPoint)
+			{
+				InitBlock(enclosingInstance);
+				this.rollbackPoint = rollbackPoint;
+			}
+			
+			public virtual void  OnCommit(System.Collections.IList commits)
+			{
+			}
+			
+			public virtual void  OnInit(System.Collections.IList commits)
+			{
+				for (System.Collections.IEnumerator iterator = commits.GetEnumerator(); iterator.MoveNext(); )
+				{
+					IndexCommit commit = (IndexCommit) iterator.Current;
+					System.Collections.IDictionary userData = commit.GetUserData();
+					if (userData.Count > 0)
+					{
+						// A commit point's label looks like "records 1-30".
+						// This code reads the trailing id ("30" in this example) and
+						// deletes the commit point if it lies after the desired rollback point.
+						System.String x = (System.String) userData["index"];
+						System.String lastVal = x.Substring(x.LastIndexOf("-") + 1);
+						int last = System.Int32.Parse(lastVal);
+						if (last > rollbackPoint)
+						{
+							/*
+							System.out.print("\tRolling back commit point:" +
+							" UserData="+commit.getUserData() +")  ("+(commits.size()-1)+" commit points left) files=");
+							Collection files = commit.getFileNames();
+							for (Iterator iterator2 = files.iterator(); iterator2.hasNext();) {
+							System.out.print(" "+iterator2.next());				
+							}
+							System.out.println();
+							*/
+							
+							commit.Delete();
+						}
+					}
+				}
+			}
+		}
+		
+		internal class DeleteLastCommitPolicy : IndexDeletionPolicy
+		{
+			public DeleteLastCommitPolicy(TestTransactionRollback enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestTransactionRollback enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTransactionRollback enclosingInstance;
+			public TestTransactionRollback Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			public virtual void  OnCommit(System.Collections.IList commits)
+			{
+			}
+			
+			public virtual void  OnInit(System.Collections.IList commits)
+			{
+				((IndexCommit) commits[commits.Count - 1]).Delete();
+			}
+		}
+		
+        [Test]
+		public virtual void  TestRollbackDeletionPolicy()
+		{
+			for (int i = 0; i < 2; i++)
+			{
+				// Unless you specify a prior commit point, rollback
+				// should not work:
+				new IndexWriter(dir, new WhitespaceAnalyzer(), new DeleteLastCommitPolicy(this), MaxFieldLength.UNLIMITED).Close();
+				IndexReader r = IndexReader.Open(dir);
+				Assert.AreEqual(100, r.NumDocs());
+				r.Close();
+			}
+		}
+		
+		// Keeps all commit points (used to build index)
+		internal class KeepAllDeletionPolicy : IndexDeletionPolicy
+		{
+			public KeepAllDeletionPolicy(TestTransactionRollback enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestTransactionRollback enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTransactionRollback enclosingInstance;
+			public TestTransactionRollback Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public virtual void  OnCommit(System.Collections.IList commits)
+			{
+			}
+			public virtual void  OnInit(System.Collections.IList commits)
+			{
+			}
+		}
+	}
+}
\ No newline at end of file

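The rollback scheme in the file above rests on two 2.9-era APIs: IndexWriter.Commit(IDictionary) stamps arbitrary user data onto a commit point, and IndexReader.ListCommits(Directory) enumerates every commit a deletion policy has kept alive. A minimal, self-contained sketch of that tag-and-list cycle follows; the class and method names are illustrative only, not part of this commit.

    using System.Collections;
    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    // Keeps every commit point alive so older ones remain available for rollback.
    public class KeepEverythingPolicy : IndexDeletionPolicy
    {
        public void OnInit(IList commits) { }
        public void OnCommit(IList commits) { }
    }

    public class CommitTaggingSketch
    {
        public static void TagAndList(Directory dir)
        {
            IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
                new KeepEverythingPolicy(), IndexWriter.MaxFieldLength.UNLIMITED);
            Document doc = new Document();
            doc.Add(new Field("record_id", "1", Field.Store.YES, Field.Index.ANALYZED));
            w.AddDocument(doc);

            IDictionary data = new Hashtable();
            data["index"] = "records 1-1";   // the label travels with the commit point
            w.Commit(data);
            w.Close();

            // Any labelled commit can later be reopened through the
            // IndexWriter(dir, analyzer, policy, maxFieldLength, commit) overload.
            foreach (IndexCommit commit in IndexReader.ListCommits(dir))
            {
                IDictionary ud = commit.GetUserData();
                if (ud.Count > 0)
                    System.Console.WriteLine(ud["index"]);
            }
        }
    }
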
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTransactions.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactions.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactions.cs Tue Nov  3 18:06:27 2009
@@ -1,13 +1,13 @@
-/**
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -15,238 +15,273 @@
  * limitations under the License.
  */
 
+using System;
+
 using NUnit.Framework;
 
-using Lucene.Net.Store;
-using Lucene.Net.Util;
 using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
+	
     [TestFixture]
-    public class TestTransactions : LuceneTestCase
-    {
-        private static readonly System.Random RANDOM = new System.Random();
-        private static volatile bool doFail;
-
-        private class RandomFailure : MockRAMDirectory.Failure
-        {
-            override public void Eval(MockRAMDirectory dir)
-            {
-                if (TestTransactions.doFail && RANDOM.Next() % 10 <= 3)
-                    throw new System.IO.IOException("now failing randomly but on purpose");
-            }
-        }
-
-        private abstract class TimedThread : SupportClass.ThreadClass
-        {
-            internal bool failed;
-            private static int RUN_TIME_SEC = 6;
-            private TimedThread[] allThreads;
-
-            abstract public void DoWork();
-
-            internal TimedThread(TimedThread[] threads)
-            {
-                this.allThreads = threads;
-            }
-
-            public override void Run()
-            {
-                System.DateTime stopTime = System.DateTime.Now.AddSeconds(RUN_TIME_SEC);
-
-                try
-                {
-                    while (System.DateTime.Now < stopTime && !AnyErrors())
-                        DoWork();
-                }
-                catch (System.Exception e)
-                {
-                    System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread + ": exc");
-                    System.Console.Out.WriteLine(e.StackTrace);
-                    failed = true;
-                }
-            }
-
-            private bool AnyErrors()
-            {
-                for (int i = 0; i < allThreads.Length; i++)
-                    if (allThreads[i] != null && allThreads[i].failed)
-                        return true;
-                return false;
-            }
-        }
-
-        private class IndexerThread : TimedThread
-        {
-            Directory dir1;
-            Directory dir2;
-            object lock_Renamed;
-            int nextID;
-
-            public IndexerThread(object lock_Renamed, Directory dir1, Directory dir2, TimedThread[] threads)
-                : base(threads)
-            {
-                this.lock_Renamed = lock_Renamed;
-                this.dir1 = dir1;
-                this.dir2 = dir2;
-            }
-
-            override public void DoWork()
-            {
-
-                IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-                writer1.SetMaxBufferedDocs(3);
-                writer1.SetMergeFactor(2);
-                ((ConcurrentMergeScheduler)writer1.GetMergeScheduler()).SetSuppressExceptions_ForNUnitTest();
-
-                IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-                // Intentionally use different params so flush/merge
-                // happen @ different times
-                writer2.SetMaxBufferedDocs(2);
-                writer2.SetMergeFactor(3);
-                ((ConcurrentMergeScheduler)writer2.GetMergeScheduler()).SetSuppressExceptions_ForNUnitTest();
-
-                Update(writer1);
-                Update(writer2);
-
-                TestTransactions.doFail = true;
-                try
-                {
-                    lock (lock_Renamed)
-                    {
-                        try
-                        {
-                            writer1.PrepareCommit();
-                        }
-                        catch (System.Exception)
-                        {
-                            writer1.Rollback();
-                            writer2.Rollback();
-                            return;
-                        }
-                        try
-                        {
-                            writer2.PrepareCommit();
-                        }
-                        catch (System.Exception)
-                        {
-                            writer1.Rollback();
-                            writer2.Rollback();
-                            return;
-                        }
-
-                        writer1.Commit();
-                        writer2.Commit();
-                    }
-                }
-                finally
-                {
-                    TestTransactions.doFail = false;
-                }
-
-                writer1.Close();
-                writer2.Close();
-            }
-
-            public void Update(IndexWriter writer)
-            {
-                // Add 10 docs:
-                for (int j = 0; j < 10; j++)
-                {
-                    Document d = new Document();
-                    int n = RANDOM.Next();
-                    d.Add(new Field("id", "" + nextID++, Field.Store.YES, Field.Index.NOT_ANALYZED));
-                    d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
-                    writer.AddDocument(d);
-                }
-
-                // Delete 5 docs:
-                int deleteID = nextID - 1;
-                for (int j = 0; j < 5; j++)
-                {
-                    writer.DeleteDocuments(new Term("id", "" + deleteID));
-                    deleteID -= 2;
-                }
-            }
-        }
-
-        private class SearcherThread : TimedThread
-        {
-            Directory dir1;
-            Directory dir2;
-            object lock_Renamed;
-
-            public SearcherThread(object lock_Renamed, Directory dir1, Directory dir2, TimedThread[] threads)
-                : base(threads)
-            {
-                this.lock_Renamed = lock_Renamed;
-                this.dir1 = dir1;
-                this.dir2 = dir2;
-            }
-
-            override public void DoWork()
-            {
-                IndexReader r1, r2;
-                lock (lock_Renamed)
-                {
-                    r1 = IndexReader.Open(dir1);
-                    r2 = IndexReader.Open(dir2);
-                }
-                if (r1.NumDocs() != r2.NumDocs())
-                    throw new System.Exception("doc counts differ: r1=" + r1.NumDocs() + " r2=" + r2.NumDocs());
-                r1.Close();
-                r2.Close();
-            }
-        }
-
-        public void InitIndex(Directory dir)
-        {
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-            for (int j = 0; j < 7; j++)
-            {
-                Document d = new Document();
-                int n = RANDOM.Next();
-                d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
-                writer.AddDocument(d);
-            }
-            writer.Close();
-        }
-
-        [Test]
-        public void TestTransactions_Renamed()
-        {
-            MockRAMDirectory dir1 = new MockRAMDirectory();
-            MockRAMDirectory dir2 = new MockRAMDirectory();
-            dir1.SetPreventDoubleWrite(false);
-            dir2.SetPreventDoubleWrite(false);
-            dir1.FailOn(new RandomFailure());
-            dir2.FailOn(new RandomFailure());
-
-            InitIndex(dir1);
-            InitIndex(dir2);
-
-            TimedThread[] threads = new TimedThread[3];
-            int numThread = 0;
-
-            IndexerThread indexerThread = new IndexerThread(this, dir1, dir2, threads);
-            threads[numThread++] = indexerThread;
-            indexerThread.Start();
-
-            SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
-            threads[numThread++] = searcherThread1;
-            searcherThread1.Start();
-
-            SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
-            threads[numThread++] = searcherThread2;
-            searcherThread2.Start();
-
-            for (int i = 0; i < numThread; i++)
-                threads[i].Join();
-
-            for (int i = 0; i < numThread; i++)
-                Assert.IsTrue(!((TimedThread)threads[i]).failed);
-        }
-    }
-}
+	public class TestTransactions:LuceneTestCase
+	{
+		private System.Random RANDOM;
+		private static volatile bool doFail;
+		
+		private class RandomFailure:MockRAMDirectory.Failure
+		{
+			public RandomFailure(TestTransactions enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestTransactions enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTransactions enclosingInstance;
+			public TestTransactions Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public override void  Eval(MockRAMDirectory dir)
+			{
+				if (TestTransactions.doFail && Enclosing_Instance.RANDOM.Next() % 10 <= 3)
+					throw new System.IO.IOException("now failing randomly but on purpose");
+			}
+		}
+		
+		abstract public class TimedThread:SupportClass.ThreadClass
+		{
+			internal bool failed;
+			private static int RUN_TIME_SEC = 6;
+			private TimedThread[] allThreads;
+			
+			abstract public void  DoWork();
+			
+			internal TimedThread(TimedThread[] threads)
+			{
+				this.allThreads = threads;
+			}
+			
+			override public void  Run()
+			{
+				// DateTime.Now.Millisecond is only the 0-999 ms component of the current
+				// second; use total elapsed milliseconds for a wall-clock deadline.
+				long stopTime = (System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond) + 1000 * RUN_TIME_SEC;
+				
+				try
+				{
+					while ((System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond) < stopTime && !AnyErrors())
+						DoWork();
+				}
+				catch (System.Exception e)
+				{
+					System.Console.Out.WriteLine(SupportClass.ThreadClass.Current() + ": exc");
+					System.Console.Out.WriteLine(e.StackTrace);
+					failed = true;
+				}
+			}
+			
+			private bool AnyErrors()
+			{
+				for (int i = 0; i < allThreads.Length; i++)
+					if (allThreads[i] != null && allThreads[i].failed)
+						return true;
+				return false;
+			}
+		}
+		
+		private class IndexerThread:TimedThread
+		{
+			private void  InitBlock(TestTransactions enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTransactions enclosingInstance;
+			public TestTransactions Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal Directory dir1;
+			internal Directory dir2;
+			internal System.Object lock_Renamed;
+			internal int nextID;
+			
+			public IndexerThread(TestTransactions enclosingInstance, System.Object lock_Renamed, Directory dir1, Directory dir2, TimedThread[] threads):base(threads)
+			{
+				InitBlock(enclosingInstance);
+				this.lock_Renamed = lock_Renamed;
+				this.dir1 = dir1;
+				this.dir2 = dir2;
+			}
+			
+			public override void  DoWork()
+			{
+				
+				IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+				writer1.SetMaxBufferedDocs(3);
+				writer1.SetMergeFactor(2);
+				((ConcurrentMergeScheduler) writer1.GetMergeScheduler()).SetSuppressExceptions();
+				
+				IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+				// Intentionally use different params so flush/merge
+				// happen @ different times
+				writer2.SetMaxBufferedDocs(2);
+				writer2.SetMergeFactor(3);
+				((ConcurrentMergeScheduler) writer2.GetMergeScheduler()).SetSuppressExceptions();
+				
+				Update(writer1);
+				Update(writer2);
+				
+				TestTransactions.doFail = true;
+				try
+				{
+					lock (lock_Renamed)
+					{
+						try
+						{
+							writer1.PrepareCommit();
+						}
+						catch (System.Exception t)
+						{
+							writer1.Rollback();
+							writer2.Rollback();
+							return ;
+						}
+						try
+						{
+							writer2.PrepareCommit();
+						}
+						catch (System.Exception t)
+						{
+							writer1.Rollback();
+							writer2.Rollback();
+							return ;
+						}
+						
+						writer1.Commit();
+						writer2.Commit();
+					}
+				}
+				finally
+				{
+					TestTransactions.doFail = false;
+				}
+				
+				writer1.Close();
+				writer2.Close();
+			}
+			
+			public virtual void  Update(IndexWriter writer)
+			{
+				// Add 10 docs:
+				for (int j = 0; j < 10; j++)
+				{
+					Document d = new Document();
+					int n = Enclosing_Instance.RANDOM.Next();
+					d.Add(new Field("id", System.Convert.ToString(nextID++), Field.Store.YES, Field.Index.NOT_ANALYZED));
+					d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
+					writer.AddDocument(d);
+				}
+				
+				// Delete 5 docs:
+				int deleteID = nextID - 1;
+				for (int j = 0; j < 5; j++)
+				{
+					writer.DeleteDocuments(new Term("id", "" + deleteID));
+					deleteID -= 2;
+				}
+			}
+		}
+		
+		private class SearcherThread:TimedThread
+		{
+			internal Directory dir1;
+			internal Directory dir2;
+			internal System.Object lock_Renamed;
+			
+			public SearcherThread(System.Object lock_Renamed, Directory dir1, Directory dir2, TimedThread[] threads):base(threads)
+			{
+				this.lock_Renamed = lock_Renamed;
+				this.dir1 = dir1;
+				this.dir2 = dir2;
+			}
+			
+			public override void  DoWork()
+			{
+				IndexReader r1, r2;
+				lock (lock_Renamed)
+				{
+					r1 = IndexReader.Open(dir1);
+					r2 = IndexReader.Open(dir2);
+				}
+				if (r1.NumDocs() != r2.NumDocs())
+					throw new System.SystemException("doc counts differ: r1=" + r1.NumDocs() + " r2=" + r2.NumDocs());
+				r1.Close();
+				r2.Close();
+			}
+		}
+		
+		public virtual void  InitIndex(Directory dir)
+		{
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			for (int j = 0; j < 7; j++)
+			{
+				Document d = new Document();
+				int n = RANDOM.Next();
+				d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
+				writer.AddDocument(d);
+			}
+			writer.Close();
+		}
+		
+		[Test]
+		public virtual void  TestTransactions_Rename()
+		{
+			RANDOM = NewRandom();
+			MockRAMDirectory dir1 = new MockRAMDirectory();
+			MockRAMDirectory dir2 = new MockRAMDirectory();
+			dir1.SetPreventDoubleWrite(false);
+			dir2.SetPreventDoubleWrite(false);
+			dir1.FailOn(new RandomFailure(this));
+			dir2.FailOn(new RandomFailure(this));
+			
+			InitIndex(dir1);
+			InitIndex(dir2);
+			
+			TimedThread[] threads = new TimedThread[3];
+			int numThread = 0;
+			
+			IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, threads);
+			threads[numThread++] = indexerThread;
+			indexerThread.Start();
+			
+			SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
+			threads[numThread++] = searcherThread1;
+			searcherThread1.Start();
+			
+			SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
+			threads[numThread++] = searcherThread2;
+			searcherThread2.Start();
+			
+			for (int i = 0; i < numThread; i++)
+				threads[i].Join();
+			
+			for (int i = 0; i < numThread; i++)
+				Assert.IsTrue(!((TimedThread) threads[i]).failed);
+		}
+	}
+}
\ No newline at end of file

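IndexerThread above is the heart of this test: it treats two independent indexes as one transaction by preparing both commits before finishing either, and rolling both back if either prepare fails. Stripped of the test scaffolding, the core pattern looks roughly like the sketch below (2.9-era API; the wrapper class is hypothetical).

    using Lucene.Net.Index;

    public class TwoIndexTransactionSketch
    {
        // Commits two indexes together or not at all. (A crash between the two
        // final Commit calls is still possible - the usual two-phase-commit caveat.)
        public static void CommitBoth(IndexWriter writer1, IndexWriter writer2)
        {
            try
            {
                writer1.PrepareCommit();   // phase 1 on the first index
                writer2.PrepareCommit();   // phase 1 on the second index
            }
            catch (System.Exception)
            {
                // Either prepare failed: abandon both pending commits.
                writer1.Rollback();
                writer2.Rollback();
                return;
            }
            // Phase 2: both prepares succeeded, so finish each commit.
            writer1.Commit();
            writer2.Commit();
        }
    }
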
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestWordlistLoader.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -24,9 +24,11 @@
 
 namespace Lucene.Net.Index
 {
-	[TestFixture]
-	public class TestWordlistLoader : LuceneTestCase
+	
+    [TestFixture]
+	public class TestWordlistLoader:LuceneTestCase
 	{
+		
 		[Test]
 		public virtual void  TestWordlistLoading()
 		{
@@ -37,6 +39,17 @@
 			CheckSet(wordSet2);
 		}
 		
+		[Test]
+		public virtual void  TestComments()
+		{
+			System.String s = "ONE\n  two \nthree\n#comment";
+			System.Collections.Hashtable wordSet1 = WordlistLoader.GetWordSet(new System.IO.StringReader(s), "#");
+			CheckSet(wordSet1);
+			Assert.IsFalse(wordSet1.Contains("#comment"));
+			Assert.IsFalse(wordSet1.Contains("comment"));
+		}
+		
+		
 		private void  CheckSet(System.Collections.Hashtable wordset)
 		{
 			Assert.AreEqual(3, wordset.Count);

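The new TestComments case relies on WordlistLoader dropping any line that begins with the supplied comment prefix and trimming the rest; note the StringReader above, since the string is the word list itself rather than a file path. A minimal usage sketch, assuming the GetWordSet(System.IO.TextReader, System.String) overload exercised by the test:

    using System.IO;
    using WordlistLoader = Lucene.Net.Analysis.WordlistLoader;

    public class WordlistSketch
    {
        public static void Main()
        {
            TextReader reader = new StringReader("ONE\n  two \nthree\n#comment");
            System.Collections.Hashtable words = WordlistLoader.GetWordSet(reader, "#");
            System.Console.WriteLine(words.Count);                 // 3: the "#comment" line is skipped
            System.Console.WriteLine(words.Contains("two"));       // True: entries are trimmed
            System.Console.WriteLine(words.Contains("#comment"));  // False
        }
    }
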
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/index.23.cfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.23.cfs.zip?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
Binary files - no diff available.

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/index.23.nocfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.23.nocfs.zip?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
Binary files - no diff available.

Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestMultiAnalyzer.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -21,12 +21,15 @@
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using LowerCaseFilter = Lucene.Net.Analysis.LowerCaseFilter;
-using Token = Lucene.Net.Analysis.Token;
 using TokenFilter = Lucene.Net.Analysis.TokenFilter;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using StandardTokenizer = Lucene.Net.Analysis.Standard.StandardTokenizer;
+using OffsetAttribute = Lucene.Net.Analysis.Tokenattributes.OffsetAttribute;
+using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
+using TypeAttribute = Lucene.Net.Analysis.Tokenattributes.TypeAttribute;
+using BaseTokenStreamTestCase = Lucene.Net.Analysis.BaseTokenStreamTestCase;
 using Query = Lucene.Net.Search.Query;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.QueryParsers
 {
@@ -34,18 +37,19 @@
 	/// <summary> Test QueryParser's ability to deal with Analyzers that return more
 	/// than one token per position or that return tokens with a position
 	/// increment &gt; 1.
+	/// 
 	/// </summary>
-	[TestFixture]
-	public class TestMultiAnalyzer : LuceneTestCase
+    [TestFixture]
+	public class TestMultiAnalyzer:BaseTokenStreamTestCase
 	{
 		
 		private static int multiToken = 0;
 		
 		[Test]
-		public virtual void  TestMultiAnalyzer_Renamed_Method()
+		public virtual void  TestMultiAnalyzer_Rename()
 		{
 			
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("", new MultiAnalyzer(this));
+			QueryParser qp = new QueryParser("", new MultiAnalyzer(this));
 			
 			// trivial, no multiple tokens:
 			Assert.AreEqual("foo", qp.Parse("foo").ToString());
@@ -87,7 +91,7 @@
 			qp.SetPhraseSlop(0);
 			
 			// non-default operator:
-			qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
+			qp.SetDefaultOperator(QueryParser.AND_OPERATOR);
 			Assert.AreEqual("+(multi multi2) +foo", qp.Parse("multi foo").ToString());
 		}
 		
@@ -111,7 +115,7 @@
 		[Test]
 		public virtual void  TestPosIncrementAnalyzer()
 		{
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("", new PosIncrementAnalyzer(this));
+			QueryParser qp = new QueryParser("", new PosIncrementAnalyzer(this));
 			Assert.AreEqual("quick brown", qp.Parse("the quick brown").ToString());
 			Assert.AreEqual("\"quick brown\"", qp.Parse("\"the quick brown\"").ToString());
 			Assert.AreEqual("quick brown fox", qp.Parse("the quick brown fox").ToString());
@@ -121,14 +125,13 @@
 		/// <summary> Expands "multi" to "multi" and "multi2", both at the same position,
 		/// and expands "triplemulti" to "triplemulti", "multi3", and "multi2".  
 		/// </summary>
-		private class MultiAnalyzer : Analyzer
+		private class MultiAnalyzer:Analyzer
 		{
 			private void  InitBlock(TestMultiAnalyzer enclosingInstance)
 			{
 				this.enclosingInstance = enclosingInstance;
 			}
 			private TestMultiAnalyzer enclosingInstance;
-			
 			public TestMultiAnalyzer Enclosing_Instance
 			{
 				get
@@ -152,7 +155,7 @@
 			}
 		}
 		
-		private sealed class TestFilter : TokenFilter
+		private sealed class TestFilter:TokenFilter
 		{
 			private void  InitBlock(TestMultiAnalyzer enclosingInstance)
 			{
@@ -168,45 +171,59 @@
 				
 			}
 			
-			private Lucene.Net.Analysis.Token prevToken;
+			private System.String prevType;
+			private int prevStartOffset;
+			private int prevEndOffset;
+			
+			internal TermAttribute termAtt;
+			internal PositionIncrementAttribute posIncrAtt;
+			internal OffsetAttribute offsetAtt;
+			internal TypeAttribute typeAtt;
 			
-			public TestFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed) : base(in_Renamed)
+			public TestFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed):base(in_Renamed)
 			{
 				InitBlock(enclosingInstance);
+				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+				posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
+				offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
+				typeAtt = (TypeAttribute) AddAttribute(typeof(TypeAttribute));
 			}
 			
-			public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token reusableToken)
+			public override bool IncrementToken()
 			{
-				if (TestMultiAnalyzer.multiToken > 0)
+				if (Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken > 0)
 				{
-                    reusableToken.Reinit("multi" + (TestMultiAnalyzer.multiToken + 1), prevToken.StartOffset(), prevToken.EndOffset(), prevToken.Type());
-					reusableToken.SetPositionIncrement(0);
-					TestMultiAnalyzer.multiToken--;
-					return reusableToken;
+					termAtt.SetTermBuffer("multi" + (Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken + 1));
+					offsetAtt.SetOffset(prevStartOffset, prevEndOffset);
+					typeAtt.SetType(prevType);
+					posIncrAtt.SetPositionIncrement(0);
+					Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken--;
+					return true;
 				}
 				else
 				{
-					Lucene.Net.Analysis.Token nextToken = input.Next(reusableToken);
-                    if (nextToken == null)
-                    {
-                        prevToken = null;
-                        return null;
-                    }
-                    prevToken = (Lucene.Net.Analysis.Token)(nextToken.Clone());
-					string text = nextToken.Term();
+					bool next = input.IncrementToken();
+					if (next == false)
+					{
+						return false;
+					}
+					prevType = typeAtt.Type();
+					prevStartOffset = offsetAtt.StartOffset();
+					prevEndOffset = offsetAtt.EndOffset();
+					System.String text = termAtt.Term();
 					if (text.Equals("triplemulti"))
 					{
-						TestMultiAnalyzer.multiToken = 2;
-						return nextToken;
+						Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken = 2;
+						return true;
 					}
 					else if (text.Equals("multi"))
 					{
-						TestMultiAnalyzer.multiToken = 1;
-						return nextToken;
+						Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken = 1;
+						return true;
 					}
 					else
 					{
-						return nextToken;
+						return true;
 					}
 				}
 			}
@@ -215,7 +232,7 @@
 		/// <summary> Analyzes "the quick brown" as: quick(incr=2) brown(incr=1).
 		/// Does not work correctly for input other than "the quick brown ...".
 		/// </summary>
-		private class PosIncrementAnalyzer : Analyzer
+		private class PosIncrementAnalyzer:Analyzer
 		{
 			private void  InitBlock(TestMultiAnalyzer enclosingInstance)
 			{
@@ -245,7 +262,7 @@
 			}
 		}
 		
-		private sealed class TestPosIncrementFilter : TokenFilter
+		private sealed class TestPosIncrementFilter:TokenFilter
 		{
 			private void  InitBlock(TestMultiAnalyzer enclosingInstance)
 			{
@@ -261,36 +278,41 @@
 				
 			}
 			
-			public TestPosIncrementFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed) : base(in_Renamed)
+			internal TermAttribute termAtt;
+			internal PositionIncrementAttribute posIncrAtt;
+			
+			public TestPosIncrementFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed):base(in_Renamed)
 			{
 				InitBlock(enclosingInstance);
+				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+				posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
 			}
 			
-			public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token reusableToken)
+			public override bool IncrementToken()
 			{
-                for (Lucene.Net.Analysis.Token nextToken = input.Next(reusableToken); nextToken != null; nextToken = input.Next(reusableToken))
+				while (input.IncrementToken())
 				{
-					if (nextToken.Term().Equals("the"))
+					if (termAtt.Term().Equals("the"))
 					{
 						// stopword, do nothing
 					}
-					else if (nextToken.Term().Equals("quick"))
+					else if (termAtt.Term().Equals("quick"))
 					{
-						nextToken.SetPositionIncrement(2);
-						return nextToken;
+						posIncrAtt.SetPositionIncrement(2);
+						return true;
 					}
 					else
 					{
-						nextToken.SetPositionIncrement(1);
-                        return nextToken;
+						posIncrAtt.SetPositionIncrement(1);
+						return true;
 					}
 				}
-				return null;
+				return false;
 			}
 		}
 		
 		/// <summary>a very simple subclass of QueryParser </summary>
-		public class DumbQueryParser : Lucene.Net.QueryParsers.QueryParser
+		private sealed class DumbQueryParser:QueryParser
 		{
 			
 			public DumbQueryParser(System.String f, Analyzer a):base(f, a)
@@ -298,12 +320,12 @@
 			}
 			
 			/// <summary>expose super's version </summary>
-			public Lucene.Net.Search.Query GetSuperFieldQuery(System.String f, System.String t)
+			public Query GetSuperFieldQuery(System.String f, System.String t)
 			{
 				return base.GetFieldQuery(f, t);
 			}
 			/// <summary>wrap super's version </summary>
-			public override Lucene.Net.Search.Query GetFieldQuery(System.String f, System.String t)
+			public /*protected internal*/ override Query GetFieldQuery(System.String f, System.String t)
 			{
 				return new DumbQueryWrapper(GetSuperFieldQuery(f, t));
 			}
@@ -313,11 +335,11 @@
 		/// the toString of the query it wraps.
 		/// </summary>
 		[Serializable]
-		private sealed class DumbQueryWrapper : Lucene.Net.Search.Query
+		private sealed class DumbQueryWrapper:Query
 		{
 			
-			private Lucene.Net.Search.Query q;
-			public DumbQueryWrapper(Lucene.Net.Search.Query q):base()
+			private Query q;
+			public DumbQueryWrapper(Query q):base()
 			{
 				this.q = q;
 			}
@@ -327,6 +349,7 @@
 			}
 			override public System.Object Clone()
 			{
+                System.Diagnostics.Debug.Fail("Port issue:", "Do we need TestMultiAnalyzer.DumbQueryWrapper.Clone()?");
 				return null;
 			}
 		}

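Most of this file's churn is the 2.4-to-2.9 TokenStream migration: instead of returning Token objects from Next(Token), a filter now acquires shared attributes once in its constructor and advances the stream with IncrementToken(), mutating those attributes in place. Reduced to its essentials, such a filter looks roughly like the sketch below (a hypothetical lower-casing example, not part of this commit).

    using TokenFilter = Lucene.Net.Analysis.TokenFilter;
    using TokenStream = Lucene.Net.Analysis.TokenStream;
    using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;

    public sealed class LowerCaseSketchFilter : TokenFilter
    {
        private TermAttribute termAtt;

        public LowerCaseSketchFilter(TokenStream input) : base(input)
        {
            // Attributes are shared by the whole chain; acquire them once up front.
            termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
        }

        public override bool IncrementToken()
        {
            if (!input.IncrementToken())
                return false;                                   // wrapped stream is exhausted
            termAtt.SetTermBuffer(termAtt.Term().ToLower());    // rewrite the shared term in place
            return true;
        }
    }
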
Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestMultiFieldQueryParser.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -20,34 +20,32 @@
 using NUnit.Framework;
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
-using Token = Lucene.Net.Analysis.Token;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using BaseTokenStreamTestCase = Lucene.Net.Analysis.BaseTokenStreamTestCase;
 using BooleanClause = Lucene.Net.Search.BooleanClause;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
 using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using Occur = Lucene.Net.Search.BooleanClause.Occur;
-using Directory = Lucene.Net.Store.Directory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.QueryParsers
 {
 	
 	/// <summary> Tests QueryParser.</summary>
-	[TestFixture]
-	public class TestMultiFieldQueryParser : LuceneTestCase
+    [TestFixture]
+	public class TestMultiFieldQueryParser:BaseTokenStreamTestCase
 	{
 		
 		/// <summary>test stop words parsing for both the non-static form, and for the
 		/// corresponding static form (qtxt, fields[]). 
 		/// </summary>
-		[Test]
-		public virtual void  TesStopwordsParsing()
+		public virtual void  tesStopwordsParsing()
 		{
 			AssertStopQueryEquals("one", "b:one t:one");
 			AssertStopQueryEquals("one stop", "b:one t:one");
@@ -118,16 +116,16 @@
 			q = mfqp.Parse("\"foo bar\"~4");
 			Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());
 			
-            // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field
-            q = mfqp.Parse("b:\"foo bar\"~4");
-            Assert.AreEqual("b:\"foo bar\"~4", q.ToString());
-
+			// LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
+			q = mfqp.Parse("b:\"foo bar\"~4");
+			Assert.AreEqual("b:\"foo bar\"~4", q.ToString());
+			
 			// make sure that terms which have a field are not touched:
 			q = mfqp.Parse("one f:two");
 			Assert.AreEqual("(b:one t:one) f:two", q.ToString());
 			
 			// AND mode:
-			mfqp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
+			mfqp.SetDefaultOperator(QueryParser.AND_OPERATOR);
 			q = mfqp.Parse("one two");
 			Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
 			q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
@@ -190,7 +188,7 @@
 				q = MultiFieldQueryParser.Parse(queries5, fields, new StandardAnalyzer());
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expected exception, array length differs
 			}
@@ -224,7 +222,7 @@
 				q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expected exception, array length differs
 			}
@@ -250,7 +248,7 @@
 				q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expected exception, array length differs
 			}
@@ -271,7 +269,7 @@
 				q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer());
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expected exception, array length differs
 			}
@@ -292,7 +290,7 @@
 				q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer());
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expected exception, array length differs
 			}
@@ -320,13 +318,13 @@
 			Analyzer analyzer = new StandardAnalyzer();
 			Directory ramDir = new RAMDirectory();
 			IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED));
 			iw.AddDocument(doc);
 			iw.Close();
 			
 			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(new System.String[]{"body"}, analyzer);
-			mfqp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.Operator.AND);
+			mfqp.SetDefaultOperator(QueryParser.Operator.AND);
 			Query q = mfqp.Parse("the footest");
 			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
 			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).scoreDocs;
@@ -335,7 +333,7 @@
 		}
 		
 		/// <summary> Return empty tokens for field "f1".</summary>
-		private class AnalyzerReturningNull : Analyzer
+		private class AnalyzerReturningNull:Analyzer
 		{
 			internal StandardAnalyzer stdAnalyzer = new StandardAnalyzer();
 			
@@ -355,11 +353,11 @@
 				}
 			}
 			
-			private class EmptyTokenStream : TokenStream
+			private class EmptyTokenStream:TokenStream
 			{
-				public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token reusableToken)
+				public override bool IncrementToken()
 				{
-					return null;
+					return false;
 				}
 			}
 		}

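Several hunks above merely re-add the exception variable in catch blocks around MultiFieldQueryParser.Parse; the contract they exercise is that the static Parse overloads throw System.ArgumentException when the queries, fields, and flags arrays differ in length. A minimal sketch of that guarded call (2.9-era API; the demo class is hypothetical):

    using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
    using MultiFieldQueryParser = Lucene.Net.QueryParsers.MultiFieldQueryParser;
    using Query = Lucene.Net.Search.Query;

    public class MfqpDemo
    {
        public static void Main()
        {
            System.String[] fields = new System.String[] { "b", "t" };
            System.String[] queries = new System.String[] { "one" }; // deliberately one element short
            try
            {
                Query q = MultiFieldQueryParser.Parse(queries, fields, new StandardAnalyzer());
                System.Console.WriteLine(q.ToString()); // not reached
            }
            catch (System.ArgumentException e)
            {
                // Reached: queries.Length differs from fields.Length.
                System.Console.WriteLine("mismatched arrays: " + e.Message);
            }
        }
    }
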
Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestQueryParser.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -29,35 +29,46 @@
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using OffsetAttribute = Lucene.Net.Analysis.Tokenattributes.OffsetAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using DateField = Lucene.Net.Documents.DateField;
 using DateTools = Lucene.Net.Documents.DateTools;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using BooleanQuery = Lucene.Net.Search.BooleanQuery;
-using ConstantScoreRangeQuery = Lucene.Net.Search.ConstantScoreRangeQuery;
 using FuzzyQuery = Lucene.Net.Search.FuzzyQuery;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using MatchAllDocsQuery = Lucene.Net.Search.MatchAllDocsQuery;
+using MultiTermQuery = Lucene.Net.Search.MultiTermQuery;
 using PhraseQuery = Lucene.Net.Search.PhraseQuery;
 using PrefixQuery = Lucene.Net.Search.PrefixQuery;
 using Query = Lucene.Net.Search.Query;
-using RangeQuery = Lucene.Net.Search.RangeQuery;
 using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using TermQuery = Lucene.Net.Search.TermQuery;
+using TermRangeQuery = Lucene.Net.Search.TermRangeQuery;
 using WildcardQuery = Lucene.Net.Search.WildcardQuery;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using LocalizedTestCase = Lucene.Net.Util.LocalizedTestCase;
 
 namespace Lucene.Net.QueryParsers
 {
 	
 	/// <summary> Tests QueryParser.</summary>
-	[TestFixture]
-	public class TestQueryParser : LuceneTestCase
+	public class TestQueryParser:LocalizedTestCase
 	{
-		public class AnonymousClassQueryParser : Lucene.Net.QueryParsers.QueryParser
+        static System.Collections.Hashtable dataTestWithDifferentLocals = new System.Collections.Hashtable();
+        static TestQueryParser()
+        {
+    		System.String[] data = new System.String[] {"TestLegacyDateRange", "TestDateRange", "TestCJK", "TestNumber", "TestFarsiRangeCollating", "TestLocalDateFormat"};
+            for (int i = 0; i < data.Length; i++)
+            {
+                dataTestWithDifferentLocals.Add(data[i], data[i]);
+            }
+        }
+
+		private class AnonymousClassQueryParser : QueryParser
 		{
 			private void  InitBlock(int[] type, TestQueryParser enclosingInstance)
 			{
@@ -78,66 +89,79 @@
 			{
 				InitBlock(type, enclosingInstance);
 			}
-			public override Lucene.Net.Search.Query GetWildcardQuery(System.String field, System.String termStr)
+			public /*protected internal*/ override Query GetWildcardQuery(System.String field, System.String termStr)
 			{
 				// override error checking of superclass
 				type[0] = 1;
 				return new TermQuery(new Term(field, termStr));
 			}
-			public override Lucene.Net.Search.Query GetPrefixQuery(System.String field, System.String termStr)
+			public /*protected internal*/ override Query GetPrefixQuery(System.String field, System.String termStr)
 			{
 				// override error checking of superclass
 				type[0] = 2;
 				return new TermQuery(new Term(field, termStr));
 			}
 			
-			public override Lucene.Net.Search.Query GetFieldQuery(System.String field, System.String queryText)
+			public /*protected internal*/ override Query GetFieldQuery(System.String field, System.String queryText)
 			{
 				type[0] = 3;
 				return base.GetFieldQuery(field, queryText);
 			}
 		}
 		
+		public TestQueryParser(System.String name):base(name, dataTestWithDifferentLocals)
+		{
+		}
+		
 		public static Analyzer qpAnalyzer = new QPTestAnalyzer();
 		
-		public class QPTestFilter : TokenFilter
+		public class QPTestFilter:TokenFilter
 		{
+			internal TermAttribute termAtt;
+			internal OffsetAttribute offsetAtt;
+			
 			/// <summary> Filter which discards the token 'stop' and which expands the
 			/// token 'phrase' into 'phrase1 phrase2'
 			/// </summary>
-			public QPTestFilter(TokenStream in_Renamed) : base(in_Renamed)
+			public QPTestFilter(TokenStream in_Renamed):base(in_Renamed)
 			{
+				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+				offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
 			}
 			
 			internal bool inPhrase = false;
 			internal int savedStart = 0, savedEnd = 0;
 			
-			public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token reusableToken)
+			public override bool IncrementToken()
 			{
-                System.Diagnostics.Debug.Assert(reusableToken != null);
 				if (inPhrase)
 				{
 					inPhrase = false;
-					return reusableToken.Reinit("phrase2", savedStart, savedEnd);
+					termAtt.SetTermBuffer("phrase2");
+					offsetAtt.SetOffset(savedStart, savedEnd);
+					return true;
 				}
 				else
-					for (Lucene.Net.Analysis.Token nextToken = input.Next(reusableToken); nextToken != null; nextToken = input.Next(reusableToken))
+					while (input.IncrementToken())
 					{
-						if (nextToken.Term().Equals("phrase"))
+						if (termAtt.Term().Equals("phrase"))
 						{
 							inPhrase = true;
-							savedStart = nextToken.StartOffset();
-							savedEnd = nextToken.EndOffset();
-							return nextToken.Reinit("phrase1", savedStart, savedEnd);
+							savedStart = offsetAtt.StartOffset();
+							savedEnd = offsetAtt.EndOffset();
+							termAtt.SetTermBuffer("phrase1");
+							offsetAtt.SetOffset(savedStart, savedEnd);
+							return true;
 						}
-						else if (!nextToken.Term().Equals("stop"))
-							return nextToken;
+						else if (!termAtt.Term().Equals("stop"))
+							return true;
 					}
-				return null;
+				return false;
 			}
 		}
 		
-		public class QPTestAnalyzer : Analyzer
+		
+		public class QPTestAnalyzer:Analyzer
 		{
 			
 			/// <summary>Filters LowerCaseTokenizer with StopFilter. </summary>
@@ -147,18 +171,18 @@
 			}
 		}
 		
-		public class QPTestParser : Lucene.Net.QueryParsers.QueryParser
+		public class QPTestParser:QueryParser
 		{
 			public QPTestParser(System.String f, Analyzer a):base(f, a)
 			{
 			}
 			
-			public override Query GetFuzzyQuery(System.String field, System.String termStr, float minSimilarity)
+			public /*protected internal*/ override Query GetFuzzyQuery(System.String field, System.String termStr, float minSimilarity)
 			{
 				throw new ParseException("Fuzzy queries not allowed");
 			}
 			
-			public override Query GetWildcardQuery(System.String field, System.String termStr)
+			public /*protected internal*/ override Query GetWildcardQuery(System.String field, System.String termStr)
 			{
 				throw new ParseException("Wildcard queries not allowed");
 			}
@@ -167,18 +191,18 @@
 		private int originalMaxClauses;
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			originalMaxClauses = BooleanQuery.GetMaxClauseCount();
 		}
 		
-		public virtual Lucene.Net.QueryParsers.QueryParser GetParser(Analyzer a)
+		public virtual QueryParser GetParser(Analyzer a)
 		{
 			if (a == null)
 				a = new SimpleAnalyzer();
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", a);
-			qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR);
+			QueryParser qp = new QueryParser("field", a);
+			qp.SetDefaultOperator(QueryParser.OR_OPERATOR);
 			return qp;
 		}
 		
@@ -197,7 +221,7 @@
 			}
 		}
 		
-		public virtual void  AssertQueryEquals(Lucene.Net.QueryParsers.QueryParser qp, System.String field, System.String query, System.String result)
+		public virtual void  AssertQueryEquals(QueryParser qp, System.String field, System.String query, System.String result)
 		{
 			Query q = qp.Parse(query);
 			System.String s = q.ToString(field);
@@ -209,7 +233,7 @@
 		
 		public virtual void  AssertEscapedQueryEquals(System.String query, Analyzer a, System.String result)
 		{
-			System.String escapedQuery = Lucene.Net.QueryParsers.QueryParser.Escape(query);
+			System.String escapedQuery = QueryParser.Escape(query);
 			if (!escapedQuery.Equals(result))
 			{
 				Assert.Fail("Query /" + query + "/ yielded /" + escapedQuery + "/, expecting /" + result + "/");
@@ -218,7 +242,7 @@
 		
 		public virtual void  AssertWildcardQueryEquals(System.String query, bool lowercase, System.String result, bool allowLeadingWildcard)
 		{
-			Lucene.Net.QueryParsers.QueryParser qp = GetParser(null);
+			QueryParser qp = GetParser(null);
 			qp.SetLowercaseExpandedTerms(lowercase);
 			qp.SetAllowLeadingWildcard(allowLeadingWildcard);
 			Query q = qp.Parse(query);
@@ -236,7 +260,7 @@
 		
 		public virtual void  AssertWildcardQueryEquals(System.String query, System.String result)
 		{
-			Lucene.Net.QueryParsers.QueryParser qp = GetParser(null);
+			QueryParser qp = GetParser(null);
 			Query q = qp.Parse(query);
 			System.String s = q.ToString("field");
 			if (!s.Equals(result))
@@ -249,8 +273,8 @@
 		{
 			if (a == null)
 				a = new SimpleAnalyzer();
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", a);
-			qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
+			QueryParser qp = new QueryParser("field", a);
+			qp.SetDefaultOperator(QueryParser.AND_OPERATOR);
 			return qp.Parse(query);
 		}
 		
@@ -265,11 +289,20 @@
 		}
 		
 		[Test]
+		public virtual void  TestCJK()
+		{
+			// Test Ideographic Space - As wide as a CJK character cell (fullwidth)
+			// used google to translate the word "term" to japanese -> 用語
+			AssertQueryEquals("term\u3000term\u3000term", null, "term\u0020term\u0020term");
+			AssertQueryEquals("用語\u3000用語\u3000用語", null, "用語\u0020用語\u0020用語");
+		}
+		
+		[Test]
 		public virtual void  TestSimple()
 		{
 			AssertQueryEquals("term term term", null, "term term term");
-			AssertQueryEquals("türm term term", new WhitespaceAnalyzer(), "türm term term");
-			AssertQueryEquals("ümlaut", new WhitespaceAnalyzer(), "ümlaut");
+			AssertQueryEquals("türm term term", new WhitespaceAnalyzer(), "türm term term");
+			AssertQueryEquals("ümlaut", new WhitespaceAnalyzer(), "ümlaut");
 			
 			AssertQueryEquals("\"\"", new KeywordAnalyzer(), "");
 			AssertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
@@ -310,13 +343,13 @@
 			AssertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null, "+(apple \"steve jobs\") -(foo bar baz)");
 			AssertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null, "+(title:dog title:cat) -author:\"bob dole\"");
 			
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new StandardAnalyzer());
+			QueryParser qp = new QueryParser("field", new StandardAnalyzer());
 			// make sure OR is the default:
-			Assert.AreEqual(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
-			qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
-			Assert.AreEqual(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR, qp.GetDefaultOperator());
-			qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR);
-			Assert.AreEqual(Lucene.Net.QueryParsers.QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
+			Assert.AreEqual(QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
+			qp.SetDefaultOperator(QueryParser.AND_OPERATOR);
+			Assert.AreEqual(QueryParser.AND_OPERATOR, qp.GetDefaultOperator());
+			qp.SetDefaultOperator(QueryParser.OR_OPERATOR);
+			Assert.AreEqual(QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
 		}
 		
 		[Test]
@@ -420,7 +453,7 @@
 				AssertWildcardQueryEquals("*Term", true, "*term");
 				Assert.Fail();
 			}
-			catch (ParseException)
+			catch (ParseException pe)
 			{
 				// expected exception
 			}
@@ -429,7 +462,7 @@
 				AssertWildcardQueryEquals("?Term", true, "?term");
 				Assert.Fail();
 			}
-			catch (ParseException)
+			catch (ParseException pe)
 			{
 				// expected exception
 			}
@@ -441,7 +474,7 @@
 		[Test]
 		public virtual void  TestLeadingWildcardType()
 		{
-			Lucene.Net.QueryParsers.QueryParser qp = GetParser(null);
+			QueryParser qp = GetParser(null);
 			qp.SetAllowLeadingWildcard(true);
 			Assert.AreEqual(typeof(WildcardQuery), qp.Parse("t*erm*").GetType());
 			Assert.AreEqual(typeof(WildcardQuery), qp.Parse("?term*").GetType());
@@ -481,11 +514,11 @@
 		public virtual void  TestRange()
 		{
 			AssertQueryEquals("[ a TO z]", null, "[a TO z]");
-			Assert.IsTrue(GetQuery("[ a TO z]", null) is ConstantScoreRangeQuery);
+			Assert.AreEqual(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery) GetQuery("[ a TO z]", null)).GetRewriteMethod());
 			
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
-			qp.SetUseOldRangeQuery(true);
-			Assert.IsTrue(qp.Parse("[ a TO z]") is RangeQuery);
+			QueryParser qp = new QueryParser("field", new SimpleAnalyzer());
+			qp.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+			Assert.AreEqual(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery) qp.Parse("[ a TO z]")).GetRewriteMethod());
 			
 			AssertQueryEquals("[ a TO z ]", null, "[a TO z]");
 			AssertQueryEquals("{ a TO z}", null, "{a TO z}");
@@ -496,53 +529,64 @@
 			AssertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
 			AssertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
 		}
-
-        public void testFarsiRangeCollating()
-        {
-
-            RAMDirectory ramDir = new RAMDirectory();
-            IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true,
-                                             IndexWriter.MaxFieldLength.LIMITED);
-            Document doc = new Document();
-            doc.Add(new Field("content", "\u0633\u0627\u0628",
-                              Field.Store.YES, Field.Index.UN_TOKENIZED));
-            iw.AddDocument(doc);
-            iw.Close();
-            IndexSearcher is_Renamed = new IndexSearcher(ramDir);
-
-            QueryParser qp = new QueryParser("content", new WhitespaceAnalyzer());
-
-            // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
-            // RuleBasedCollator.  However, the Arabic Locale seems to order the Farsi
-            // characters properly.
-            System.Globalization.CompareInfo c = new System.Globalization.CultureInfo("ar").CompareInfo;
-            qp.SetRangeCollator(c);
-
-            // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
-            // orders the U+0698 character before the U+0633 character, so the single
-            // index Term below should NOT be returned by a ConstantScoreRangeQuery
-            // with a Farsi Collator (or an Arabic one for the case when Farsi is_Renamed not
-            // supported).
-
-            // Test ConstantScoreRangeQuery
-            qp.SetUseOldRangeQuery(false);
-            ScoreDoc[] result = is_Renamed.Search(qp.Parse("[ \u062F TO \u0698 ]"), null, 1000).scoreDocs;
-            Assert.AreEqual(0, result.Length, "The index Term should not be included.");
-
-            result = is_Renamed.Search(qp.Parse("[ \u0633 TO \u0638 ]"), null, 1000).scoreDocs;
-            Assert.AreEqual(1, result.Length, "The index Term should be included.");
-
-            // Test RangeQuery
-            qp.SetUseOldRangeQuery(true);
-            result = is_Renamed.Search(qp.Parse("[ \u062F TO \u0698 ]"), null, 1000).scoreDocs;
-            Assert.AreEqual(0, result.Length, "The index Term should not be included.");
-
-            result = is_Renamed.Search(qp.Parse("[ \u0633 TO \u0638 ]"), null, 1000).scoreDocs;
-            Assert.AreEqual(1, result.Length, "The index Term should be included.");
-
-            is_Renamed.Close();
-        }
-  
+		
+		[Test]
+		public virtual void  TestFarsiRangeCollating()
+		{
+			
+			RAMDirectory ramDir = new RAMDirectory();
+			IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			Document doc = new Document();
+			doc.Add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			iw.AddDocument(doc);
+			iw.Close();
+			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
+			
+			QueryParser qp = new QueryParser("content", new WhitespaceAnalyzer());
+			
+			// Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
+			// RuleBasedCollator.  However, the Arabic Locale seems to order the Farsi
+			// characters properly.
+			System.Globalization.CompareInfo c = new System.Globalization.CultureInfo("ar").CompareInfo;
+			qp.SetRangeCollator(c);
+			
+			// Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
+			// orders the U+0698 character before the U+0633 character, so the single
+			// index Term below should NOT be returned by a TermRangeQuery
+			// with a Farsi Collator (or an Arabic one for the case when Farsi is not
+			// supported).
+			
+			// Test the constant-score filter rewrite (old ConstantScoreRangeQuery behavior)
+			qp.SetMultiTermRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+			ScoreDoc[] result = is_Renamed.Search(qp.Parse("[ \u062F TO \u0698 ]"), null, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length, "The index Term should not be included.");
+			
+			result = is_Renamed.Search(qp.Parse("[ \u0633 TO \u0638 ]"), null, 1000).scoreDocs;
+			Assert.AreEqual(1, result.Length, "The index Term should be included.");
+			
+			// Test the scoring BooleanQuery rewrite (old RangeQuery behavior)
+			qp.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+			result = is_Renamed.Search(qp.Parse("[ \u062F TO \u0698 ]"), null, 1000).scoreDocs;
+			Assert.AreEqual(0, result.Length, "The index Term should not be included.");
+			
+			result = is_Renamed.Search(qp.Parse("[ \u0633 TO \u0638 ]"), null, 1000).scoreDocs;
+			Assert.AreEqual(1, result.Length, "The index Term should be included.");
+			
+			is_Renamed.Close();
+		}
+		
+		private System.String EscapeDateString(System.String s)
+		{
+			if (s.IndexOf(" ") > - 1)
+			{
+				return "\"" + s + "\"";
+			}
+			else
+			{
+				return s;
+			}
+		}
+		
 		/// <summary>for testing legacy DateField support </summary>
 		private System.String GetLegacyDate(System.String s)
 		{
@@ -555,7 +599,7 @@
 		{
 			System.DateTime tempAux = System.DateTime.Parse(s, System.Globalization.CultureInfo.CurrentCulture);
 			return GetDate(tempAux, resolution);
-		}                                               
+		}
 		
 		/// <summary>for testing DateTools support </summary>
 		private System.String GetDate(System.DateTime d, DateTools.Resolution resolution)
@@ -570,48 +614,43 @@
 			}
 		}
 		
-		public virtual System.String GetDate(System.String s)
-		{
-			System.DateTime tempAux = System.DateTime.Parse(s);
-			return DateField.DateToString(tempAux);
-		}
-		
 		private System.String GetLocalizedDate(int year, int month, int day, bool extendLastDate)
 		{
-			System.DateTime temp = new System.DateTime(year, month, day);
+			System.Globalization.Calendar calendar = new System.Globalization.GregorianCalendar();
+			System.DateTime temp = new System.DateTime(year, month, day, calendar);
 			if (extendLastDate)
 			{
-				temp = temp.AddHours(23);
-				temp = temp.AddMinutes(59);
-				temp = temp.AddSeconds(59);
-				temp = temp.AddMilliseconds(999);
-			}
-			return temp.ToShortDateString();    // ToString("MM/d/yyy");
-		}
+				temp = temp.AddHours(23);
+				temp = temp.AddMinutes(59);
+				temp = temp.AddSeconds(59);
+				temp = temp.AddMilliseconds(999);
+			}
+			return temp.ToShortDateString();
+		}
 		
 		/// <summary>for testing legacy DateField support </summary>
 		[Test]
 		public virtual void  TestLegacyDateRange()
 		{
-			System.String startDate = GetLocalizedDate(2002, 2, 1, false);
-			System.String endDate = GetLocalizedDate(2002, 2, 4, false);
+			System.String startDate = GetLocalizedDate(2002, 1, 1, false);
+			System.String endDate = GetLocalizedDate(2002, 1, 4, false);
 			System.Globalization.Calendar endDateExpected = new System.Globalization.GregorianCalendar();
-			// endDateExpected should be set to: "2002, 1, 4, 23, 59, 59, 999" otherwise what's the point of useing GregorianCalendar()   // {{Aroush-2.1}}
-			System.DateTime tempAux = new System.DateTime(2002, 2, 4, 23, 59, 59, 999);
-			AssertQueryEquals("[ " + startDate + " TO " + endDate + "]", null, "[" + GetLegacyDate(startDate) + " TO " + DateField.DateToString(tempAux) + "]");
-			AssertQueryEquals("{  " + startDate + "    " + endDate + "   }", null, "{" + GetLegacyDate(startDate) + " TO " + GetLegacyDate(endDate) + "}");
+			System.DateTime tempAux = new System.DateTime(2002, 1, 4, 23, 59, 59, 999, endDateExpected);
+			AssertQueryEquals("[ " + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "]", null, "[" + GetLegacyDate(startDate) + " TO " + DateField.DateToString(tempAux) + "]");
+			AssertQueryEquals("{  " + EscapeDateString(startDate) + "    " + EscapeDateString(endDate) + "   }", null, "{" + GetLegacyDate(startDate) + " TO " + GetLegacyDate(endDate) + "}");
 		}
 		
 		[Test]
 		public virtual void  TestDateRange()
 		{
-			System.String startDate = GetLocalizedDate(2002, 2, 1, false);
-			System.String endDate = GetLocalizedDate(2002, 2, 4, false);
-			System.DateTime endDateExpected = new System.DateTime(2002, 2, 4, 23, 59, 59, 999);
+			System.String startDate = GetLocalizedDate(2002, 1, 1, false);
+			System.String endDate = GetLocalizedDate(2002, 1, 4, false);
+			System.Globalization.Calendar calendar = new System.Globalization.GregorianCalendar();
+			System.DateTime endDateExpected = new System.DateTime(2002, 1, 4, 23, 59, 59, 999, calendar);
 			System.String defaultField = "default";
 			System.String monthField = "month";
 			System.String hourField = "hour";
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new SimpleAnalyzer());
+			QueryParser qp = new QueryParser("field", new SimpleAnalyzer());
 			
 			// Don't set any date resolution and verify if DateField is used
 			System.DateTime tempAux = endDateExpected;
@@ -643,10 +682,10 @@
 			AssertDateRangeQueryEquals(qp, hourField, startDate, endDate, tempAux5, DateTools.Resolution.HOUR);
 		}
 		
-		public virtual void  AssertDateRangeQueryEquals(Lucene.Net.QueryParsers.QueryParser qp, System.String field, System.String startDate, System.String endDate, System.DateTime endDateInclusive, DateTools.Resolution resolution)
+		public virtual void  AssertDateRangeQueryEquals(QueryParser qp, System.String field, System.String startDate, System.String endDate, System.DateTime endDateInclusive, DateTools.Resolution resolution)
 		{
-			AssertQueryEquals(qp, field, field + ":[" + startDate + " TO " + endDate + "]", "[" + GetDate(startDate, resolution) + " TO " + GetDate(endDateInclusive, resolution) + "]");
-			AssertQueryEquals(qp, field, field + ":{" + startDate + " TO " + endDate + "}", "{" + GetDate(startDate, resolution) + " TO " + GetDate(endDate, resolution) + "}");
+			AssertQueryEquals(qp, field, field + ":[" + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "]", "[" + GetDate(startDate, resolution) + " TO " + GetDate(endDateInclusive, resolution) + "]");
+			AssertQueryEquals(qp, field, field + ":{" + EscapeDateString(startDate) + " TO " + EscapeDateString(endDate) + "}", "{" + GetDate(startDate, resolution) + " TO " + GetDate(endDate, resolution) + "}");
 		}
 		
 		[Test]
@@ -739,9 +778,9 @@
 			AssertQueryEquals("\\\\", a, "\\"); // escaped backslash
 			
 			AssertParseException("\\"); // a backslash must always be escaped
-
-            // LUCENE-1189
-            AssertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b");
+			
+			// LUCENE-1189
+			AssertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b");
 		}
 		
 		[Test]
@@ -821,8 +860,10 @@
 		[Test]
 		public virtual void  TestBoost()
 		{
-			StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(new System.String[]{"on"});
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", oneStopAnalyzer);
+			System.Collections.Hashtable stopWords = new System.Collections.Hashtable(1);
+			SupportClass.CollectionsHelper.AddIfNotContains(stopWords, "on");
+			StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(stopWords);
+			QueryParser qp = new QueryParser("field", oneStopAnalyzer);
 			Query q = qp.Parse("on^1.0");
 			Assert.IsNotNull(q);
 			q = qp.Parse("\"hello\"^2.0");
@@ -834,7 +875,7 @@
 			q = qp.Parse("\"on\"^1.0");
 			Assert.IsNotNull(q);
 			
-			Lucene.Net.QueryParsers.QueryParser qp2 = new Lucene.Net.QueryParsers.QueryParser("field", new StandardAnalyzer());
+			QueryParser qp2 = new QueryParser("field", new StandardAnalyzer());
 			q = qp2.Parse("the^3");
 			// "the" is a stop word so the result is an empty query:
 			Assert.IsNotNull(q);
@@ -846,9 +887,9 @@
 		{
 			try
 			{
-				Query q = GetQuery(queryString, null);
+				GetQuery(queryString, null);
 			}
-			catch (ParseException)
+			catch (ParseException expected)
 			{
 				return ;
 			}
@@ -858,14 +899,15 @@
 		[Test]
 		public virtual void  TestException()
 		{
-            AssertParseException("\"some phrase");
-            AssertParseException("(foo bar");
-            AssertParseException("foo bar))");
-            AssertParseException("field:term:with:colon some more terms");
-            AssertParseException("(sub query)^5.0^2.0 plus more");
-            AssertParseException("secret AND illegal) AND access:confidential");
+			AssertParseException("\"some phrase");
+			AssertParseException("(foo bar");
+			AssertParseException("foo bar))");
+			AssertParseException("field:term:with:colon some more terms");
+			AssertParseException("(sub query)^5.0^2.0 plus more");
+			AssertParseException("secret AND illegal) AND access:confidential");
 		}
 		
+		
 		[Test]
 		public virtual void  TestCustomQueryParserWildcard()
 		{
@@ -874,7 +916,7 @@
 				new QPTestParser("contents", new WhitespaceAnalyzer()).Parse("a?t");
 				Assert.Fail("Wildcard queries should not be allowed");
 			}
-			catch (ParseException)
+			catch (ParseException expected)
 			{
 				// expected exception
 			}
@@ -888,7 +930,7 @@
 				new QPTestParser("contents", new WhitespaceAnalyzer()).Parse("xunit~");
 				Assert.Fail("Fuzzy queries should not be allowed");
 			}
-			catch (ParseException)
+			catch (ParseException expected)
 			{
 				// expected exception
 			}
@@ -900,13 +942,13 @@
 			BooleanQuery.SetMaxClauseCount(2);
 			try
 			{
-				Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
+				QueryParser qp = new QueryParser("field", new WhitespaceAnalyzer());
 				qp.Parse("one two three");
-				Assert.Fail("ParseException expected due to too many bool clauses");
+				Assert.Fail("ParseException expected due to too many boolean clauses");
 			}
-			catch (ParseException)
+			catch (ParseException expected)
 			{
-				// too many bool clauses, so ParseException is expected
+				// too many boolean clauses, so ParseException is expected
 			}
 		}
 		
@@ -914,7 +956,7 @@
 		[Test]
 		public virtual void  TestPrecedence()
 		{
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
+			QueryParser qp = new QueryParser("field", new WhitespaceAnalyzer());
 			Query query1 = qp.Parse("A AND B OR C AND D");
 			Query query2 = qp.Parse("+A +B +C +D");
 			Assert.AreEqual(query1, query2);
@@ -923,12 +965,12 @@
 		[Test]
 		public virtual void  TestLocalDateFormat()
 		{
-			Lucene.Net.Store.RAMDirectory ramDir = new Lucene.Net.Store.RAMDirectory();
-			Lucene.Net.Index.IndexWriter iw = new Lucene.Net.Index.IndexWriter(ramDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			RAMDirectory ramDir = new RAMDirectory();
+			IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			AddDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
 			AddDateDoc("b", 2005, 12, 4, 22, 15, 0, iw);
 			iw.Close();
-			Lucene.Net.Search.IndexSearcher is_Renamed = new Lucene.Net.Search.IndexSearcher(ramDir);
+			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
 			AssertHits(1, "[12/1/2005 TO 12/3/2005]", is_Renamed);
 			AssertHits(2, "[12/1/2005 TO 12/4/2005]", is_Renamed);
 			AssertHits(1, "[12/3/2005 TO 12/4/2005]", is_Renamed);
@@ -942,7 +984,7 @@
 		public virtual void  TestStarParsing()
 		{
 			int[] type = new int[1];
-			Lucene.Net.QueryParsers.QueryParser qp = new AnonymousClassQueryParser(type, this, "field", new WhitespaceAnalyzer());
+			QueryParser qp = new AnonymousClassQueryParser(type, this, "field", new WhitespaceAnalyzer());
 			
 			TermQuery tq;
 			
@@ -983,7 +1025,7 @@
 		[Test]
 		public virtual void  TestStopwords()
 		{
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("a", new StopAnalyzer(new System.String[] { "the", "foo" }));
+			QueryParser qp = new QueryParser("a", new StopAnalyzer(new System.String[]{"the", "foo"}));
 			Query result = qp.Parse("a:the OR a:foo");
 			Assert.IsNotNull(result, "result is null and it shouldn't be");
 			Assert.IsTrue(result is BooleanQuery, "result is not a BooleanQuery");
@@ -1005,7 +1047,7 @@
 			StopFilter.SetEnablePositionIncrementsDefault(true);
 			try
 			{
-				Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("a", new StopAnalyzer(new System.String[] { "the", "in", "are", "this" }));
+				QueryParser qp = new QueryParser("a", new StopAnalyzer(new System.String[]{"the", "in", "are", "this"}));
 				qp.SetEnablePositionIncrements(true);
 				System.String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
 				//               0         2                      5           7  8
@@ -1030,7 +1072,7 @@
 		[Test]
 		public virtual void  TestMatchAllDocs()
 		{
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
+			QueryParser qp = new QueryParser("field", new WhitespaceAnalyzer());
 			Assert.AreEqual(new MatchAllDocsQuery(), qp.Parse("*:*"));
 			Assert.AreEqual(new MatchAllDocsQuery(), qp.Parse("(*:*)"));
 			BooleanQuery bq = (BooleanQuery) qp.Parse("+*:* -*:*");
@@ -1038,26 +1080,27 @@
 			Assert.IsTrue(bq.GetClauses()[1].GetQuery() is MatchAllDocsQuery);
 		}
 		
-		private void  AssertHits(int expected, System.String query, Lucene.Net.Search.IndexSearcher is_Renamed)
+		private void  AssertHits(int expected, System.String query, IndexSearcher is_Renamed)
 		{
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("date", new WhitespaceAnalyzer());
-			qp.SetLocale(new System.Globalization.CultureInfo("en-US"));
+			QueryParser qp = new QueryParser("date", new WhitespaceAnalyzer());
+			qp.SetLocale(new System.Globalization.CultureInfo("en"));
 			Query q = qp.Parse(query);
 			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(expected, hits.Length);
 		}
 		
-		private static void  AddDateDoc(System.String content, int year, int month, int day, int hour, int minute, int second, Lucene.Net.Index.IndexWriter iw)
+		private static void  AddDateDoc(System.String content, int year, int month, int day, int hour, int minute, int second, IndexWriter iw)
 		{
-			Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
-			d.Add(new Lucene.Net.Documents.Field("f", content, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
-			System.DateTime tempAux = new System.DateTime(year, month, day, hour, minute, second);
-			d.Add(new Lucene.Net.Documents.Field("date", DateField.DateToString(tempAux), Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.NOT_ANALYZED));
+			Document d = new Document();
+			d.Add(new Field("f", content, Field.Store.YES, Field.Index.ANALYZED));
+			System.Globalization.Calendar cal = new System.Globalization.GregorianCalendar();
+			System.DateTime tempAux = new System.DateTime(year, month, day, hour, minute, second, cal);
+			d.Add(new Field("date", DateField.DateToString(tempAux), Field.Store.YES, Field.Index.NOT_ANALYZED));
 			iw.AddDocument(d);
 		}
 		
 		[TearDown]
-		public override void TearDown()
+		public override void  TearDown()
 		{
 			base.TearDown();
 			BooleanQuery.SetMaxClauseCount(originalMaxClauses);

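The range-query changes in TestQueryParser.cs above retire QueryParser.SetUseOldRangeQuery(bool) in favor of MultiTermQuery rewrite methods; both code paths now produce a TermRangeQuery. A minimal sketch of the new idiom, using only the Lucene.Net 2.9 names that appear in this patch:

    using Lucene.Net.Analysis;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;

    public class RangeRewriteSketch
    {
        public static void Main()
        {
            QueryParser qp = new QueryParser("field", new SimpleAnalyzer());

            // Constant-score rewrite: backs the query with a filter, so it
            // scales to ranges matching many terms (the old
            // ConstantScoreRangeQuery behavior).
            qp.SetMultiTermRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
            Query constantScore = qp.Parse("[ a TO z ]");

            // Scoring rewrite: expands the range into a BooleanQuery of term
            // clauses, bounded by BooleanQuery.GetMaxClauseCount() (the old
            // RangeQuery behavior).
            qp.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
            Query scoring = qp.Parse("[ a TO z ]");

            System.Console.WriteLine(constantScore + " | " + scoring);
        }
    }
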
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/BaseTestRangeFilter.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,44 +19,66 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 
 namespace Lucene.Net.Search
 {
-	[TestFixture]
-	public class BaseTestRangeFilter : LuceneTestCase
+	
+	[TestFixture]
+	public class BaseTestRangeFilter:LuceneTestCase
 	{
+		private void  InitBlock()
+		{
+			signedIndex = new TestIndex(this, System.Int32.MaxValue, System.Int32.MinValue, true);
+			unsignedIndex = new TestIndex(this, System.Int32.MaxValue, 0, false);
+		}
+		
 		public const bool F = false;
 		public const bool T = true;
 		
-		internal System.Random rand = new System.Random((System.Int32) 101); // use a set seed to test is deterministic
+		protected internal System.Random rand;
 		
-        /// <summary>
-        /// Collation interacts badly with hyphens -- collation produces different ordering than Unicode code-point
-        /// ordering -- so two indexes are created: one which can't have negative random integers, for testing collated
-        /// ranges, and the other which can have negative random integers, for all other tests
-        /// </summary>
-        internal class TestIndex
-        {
-            internal int maxR = System.Int32.MinValue;
-            internal int minR = System.Int32.MaxValue;
-            internal bool allowNegativeRandomInts;
-            internal RAMDirectory index = new RAMDirectory();
-
-            internal TestIndex(int minR, int maxR, bool allowNegativeRandomInts)
-            {
-                this.minR = minR;
-                this.maxR = maxR;
-                this.allowNegativeRandomInts = allowNegativeRandomInts;
-            }
-        }
-        internal TestIndex signedIndex = new TestIndex(int.MaxValue, int.MinValue, true);
-        internal TestIndex unsignedIndex = new TestIndex(int.MaxValue, 0, false);
+		/// <summary> Collation interacts badly with hyphens -- collation produces different
+		/// ordering than Unicode code-point ordering -- so two indexes are created:
+		/// one which can't have negative random integers, for testing collated 
+		/// ranges, and the other which can have negative random integers, for all
+		/// other tests. 
+		/// </summary>
+		internal class TestIndex
+		{
+			private void  InitBlock(BaseTestRangeFilter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private BaseTestRangeFilter enclosingInstance;
+			public BaseTestRangeFilter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal int maxR;
+			internal int minR;
+			internal bool allowNegativeRandomInts;
+			internal RAMDirectory index = new RAMDirectory();
+			
+			internal TestIndex(BaseTestRangeFilter enclosingInstance, int minR, int maxR, bool allowNegativeRandomInts)
+			{
+				InitBlock(enclosingInstance);
+				this.minR = minR;
+				this.maxR = maxR;
+				this.allowNegativeRandomInts = allowNegativeRandomInts;
+			}
+		}
+		internal TestIndex signedIndex;
+		internal TestIndex unsignedIndex;
 		
 		internal int minId = 0;
 		internal int maxId = 10000;
@@ -84,16 +106,20 @@
 			return b.ToString();
 		}
 		
-		public BaseTestRangeFilter(System.String name)
+		public BaseTestRangeFilter(System.String name):base(name)
 		{
-            Build(signedIndex);
-            Build(unsignedIndex);
+			InitBlock();
+			rand = NewRandom();
+			Build(signedIndex);
+			Build(unsignedIndex);
 		}
 		public BaseTestRangeFilter()
 		{
-            Build(signedIndex);
-            Build(unsignedIndex);
-        }
+			InitBlock();
+			rand = NewRandom();
+			Build(signedIndex);
+			Build(unsignedIndex);
+		}
 		
 		private void  Build(TestIndex index)
 		{
@@ -105,9 +131,9 @@
 				
 				for (int d = minId; d <= maxId; d++)
 				{
-					Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+					Document doc = new Document();
 					doc.Add(new Field("id", Pad(d), Field.Store.YES, Field.Index.NOT_ANALYZED));
-					int r = index.allowNegativeRandomInts? rand.Next() : rand.Next(int.MaxValue);
+					int r = index.allowNegativeRandomInts ? rand.Next() : rand.Next(System.Int32.MaxValue);
 					if (index.maxR < r)
 					{
 						index.maxR = r;
@@ -126,10 +152,10 @@
 			}
 			catch (System.Exception e)
 			{
-				throw new System.Exception("can't build index", e);
+				throw new System.SystemException("can't build index", e);
 			}
 		}
-
+		
 		[Test]
 		public virtual void  TestPad()
 		{

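BaseTestRangeFilter indexes each document's id through the Pad() helper so that range filters comparing string terms see the same ordering as the underlying integers. The body of Pad() lies outside the hunks shown here; a sketch of the technique it relies on, with the field width a purely illustrative assumption:

    using System;

    public class PadSketch
    {
        // Zero-fill to a fixed width so that lexicographic order of the
        // padded strings matches numeric order. Width 13 is an assumption for
        // this sketch; the real Pad() also encodes a sign prefix so that the
        // negative random ints allowed in the signed index stay ordered.
        public static string Pad(int n)
        {
            return n.ToString("D13");
        }

        public static void Main()
        {
            Console.WriteLine(Pad(7));      // 0000000000007
            Console.WriteLine(Pad(10000));  // 0000000010000
            // string.CompareOrdinal(Pad(7), Pad(10000)) < 0, matching 7 < 10000.
        }
    }
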
Modified: incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/CachingWrapperFilterHelper.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -26,12 +26,14 @@
 	
 	/// <summary> A unit test helper class to test when the filter is getting cached and when it is not.</summary>
 	[Serializable]
-	public class CachingWrapperFilterHelper : CachingWrapperFilter
+	public class CachingWrapperFilterHelper:CachingWrapperFilter
 	{
 		
 		private bool shouldHaveCache = false;
 		
-		public CachingWrapperFilterHelper(Filter filter) : base(filter)
+		/// <param name="filter">Filter to cache results of
+		/// </param>
+		public CachingWrapperFilterHelper(Filter filter):base(filter)
 		{
 		}
 		
@@ -57,8 +59,7 @@
 				}
 				else
 				{
-					Assert.IsNull(cached, cached == null ? "Cache should be null " : "Cache should be null " + cached.ToString());
-					// argument evaluated prior to method call ->//Assert.IsNull(cached, "Cache should be null " + cached.ToString());
+					Assert.IsNull(cached, "Cache should be null " + cached);
 				}
 				if (cached != null)
 				{
@@ -88,9 +89,10 @@
 				return false;
 			return this.filter.Equals((CachingWrapperFilterHelper) o);
 		}
+		
 		public override int GetHashCode()
 		{
-			return base.GetHashCode();
+			return this.filter.GetHashCode() ^ 0x5525aacb;
 		}
 	}
 }
\ No newline at end of file
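
The GetHashCode change in CachingWrapperFilterHelper keeps hashing consistent with equality: since equality is defined in terms of the wrapped filter, the hash is now derived from that same filter rather than from object identity, with the XOR constant distinguishing the wrapper type. A minimal illustration of the pattern, using a hypothetical wrapper class rather than the Lucene.Net types:

    public class WrapperSketch
    {
        private readonly object inner;

        public WrapperSketch(object inner)
        {
            this.inner = inner;
        }

        public override bool Equals(object o)
        {
            WrapperSketch other = o as WrapperSketch;
            // Two wrappers are equal when their wrapped objects are equal.
            return other != null && inner.Equals(other.inner);
        }

        public override int GetHashCode()
        {
            // Hash the same state Equals inspects, so equal wrappers hash
            // alike; the constant (as in the patch) marks the wrapper type.
            return inner.GetHashCode() ^ 0x5525aacb;
        }
    }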