Posted to commits@lucenenet.apache.org by cc...@apache.org on 2011/11/16 06:24:28 UTC

[Lucene.Net] svn commit: r1202532 [2/3] - in /incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk: src/core/Analysis/ src/core/Index/ src/core/Search/ src/core/Store/ src/demo/Demo.Common/ test/core/ test/core/Index/ test/core/QueryParser/ test/core/Search/ test/c...

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs Wed Nov 16 05:24:27 2011
@@ -75,30 +75,40 @@ namespace Lucene.Net.Index
 			{
 				Document doc = new Document();
 				doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-				while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < stopTime)
-				{
-					for (int i = 0; i < 27; i++)
-					{
-						try
-						{
-							writer.AddDocument(doc);
-						}
-						catch (System.Exception t)
-						{
-							System.Console.Out.WriteLine(t.StackTrace);
-							Assert.Fail("addDocument failed");
-						}
-					}
-					try
-					{
-						System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1));
-					}
-					catch (System.Threading.ThreadInterruptedException ie)
-					{
-						ThreadClass.Current().Interrupt();
-						throw new System.SystemException("", ie);
-					}
-				}
+			    do
+			    {
+			        for (int i = 0; i < 27; i++)
+			        {
+			            try
+			            {
+			                writer.AddDocument(doc);
+			            }
+			            catch (System.Exception t)
+			            {
+			                System.Console.Out.WriteLine(t.StackTrace);
+			                Assert.Fail("addDocument failed");
+			            }
+			            if (i%2 == 0)
+			            {
+			                try
+			                {
+			                    writer.Commit();
+			                }
+			                catch (Exception e)
+			                {
+			                    throw new SystemException("", e);
+			                }
+			            }
+			        }
+			        try
+			        {
+			            System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000*1));
+			        }
+			        catch (System.Threading.ThreadInterruptedException ie)
+			        {
+			            throw;
+			        }
+			    } while ((DateTime.Now.Ticks/TimeSpan.TicksPerMillisecond) < stopTime);
 			}
 		}
 		public const System.String INDEX_PATH = "test.snapshots";
@@ -106,11 +116,9 @@ namespace Lucene.Net.Index
         [Test]
 		public virtual void  TestSnapshotDeletionPolicy_Renamed()
 		{
-			System.IO.DirectoryInfo dir = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), INDEX_PATH));
+			System.IO.DirectoryInfo dir = _TestUtil.GetTempDir(INDEX_PATH);
 			try
 			{
-				// Sometimes past test leaves the dir
-				_TestUtil.RmDir(dir);
 				Directory fsDir = FSDirectory.Open(dir);
 				RunTest(fsDir);
 				fsDir.Close();
@@ -131,27 +139,39 @@ namespace Lucene.Net.Index
 			Directory dir = new MockRAMDirectory();
 			
 			SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-			IndexWriter writer = new IndexWriter(dir, true, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp);
-			// Force frequent commits
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+			// Force frequent flushes
 			writer.SetMaxBufferedDocs(2);
 			Document doc = new Document();
 			doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-			for (int i = 0; i < 7; i++)
-				writer.AddDocument(doc);
-			IndexCommit cp = (IndexCommit) dp.Snapshot();
+            for (int i = 0; i < 7; i++)
+            {
+                writer.AddDocument(doc);
+                if (i % 2 == 0)
+                {
+                    writer.Commit();
+                }
+            }
+            IndexCommit cp =  dp.Snapshot();
 			CopyFiles(dir, cp);
 			writer.Close();
 			CopyFiles(dir, cp);
-			
-			writer = new IndexWriter(dir, true, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp);
+
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
 			CopyFiles(dir, cp);
-			for (int i = 0; i < 7; i++)
-				writer.AddDocument(doc);
+            for (int i = 0; i < 7; i++)
+            {
+                writer.AddDocument(doc);
+                if (i % 2 == 0)
+                {
+                    writer.Commit();
+                }
+            }
 			CopyFiles(dir, cp);
 			writer.Close();
 			CopyFiles(dir, cp);
 			dp.Release();
-			writer = new IndexWriter(dir, true, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.Close();
 			try
 			{
@@ -171,9 +191,9 @@ namespace Lucene.Net.Index
 			long stopTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 7000;
 			
 			SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-			IndexWriter writer = new IndexWriter(dir, true, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp);
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
 			
-			// Force frequent commits
+			// Force frequent flushes
 			writer.SetMaxBufferedDocs(2);
 			
 			ThreadClass t = new AnonymousClassThread(stopTime, writer, this);
@@ -182,13 +202,13 @@ namespace Lucene.Net.Index
 			
 			// While the above indexing thread is running, take many
 			// backups:
-			while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < stopTime)
-			{
-				BackupIndex(dir, dp);
-				System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 20));
-				if (!t.IsAlive)
-					break;
-			}
+		    do
+		    {
+		        BackupIndex(dir, dp);
+		        System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000*20));
+		        if (!t.IsAlive)
+		            break;
+		    } while ((DateTime.Now.Ticks/TimeSpan.TicksPerMillisecond) < stopTime);
 			
 			t.Join();
 			
@@ -234,10 +254,8 @@ namespace Lucene.Net.Index
 			// we take to do the backup, the IndexWriter will
 			// never delete the files in the snapshot:
 			System.Collections.Generic.ICollection<string> files = cp.GetFileNames();
-			System.Collections.IEnumerator it = files.GetEnumerator();
-			while (it.MoveNext())
+            foreach (string fileName in files)
 			{
-				System.String fileName = (System.String) it.Current;
 				// NOTE: in a real backup you would not use
 				// readFile; you would need to use something else
 				// that copies the file to a backup location.  This

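The TestSnapshotDeletionPolicy changes above exercise the hot-backup pattern: take a snapshot so the writer cannot delete the referenced files, copy them, then release. A minimal sketch of that pattern, using only the calls visible in the hunks (Snapshot(), GetFileNames(), Release()); CopyToBackup is a hypothetical helper standing in for whatever actually copies the bytes out of the Directory:

    using System.Collections.Generic;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public static class HotBackupSketch
    {
        // Takes a snapshot so IndexWriter will not delete the referenced files,
        // copies them, then releases the snapshot.
        public static void BackupIndex(Directory dir, SnapshotDeletionPolicy dp)
        {
            IndexCommit cp = dp.Snapshot();
            try
            {
                ICollection<string> files = cp.GetFileNames();
                foreach (string fileName in files)
                {
                    CopyToBackup(dir, fileName);
                }
            }
            finally
            {
                dp.Release();   // allow the snapshotted files to be deleted again
            }
        }

        // Hypothetical helper, not part of Lucene: copy one index file to backup storage,
        // e.g. by reading it through dir.OpenInput(fileName).
        private static void CopyToBackup(Directory dir, string fileName)
        {
        }
    }
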
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing.cs Wed Nov 16 05:24:27 2011
@@ -151,9 +151,9 @@ namespace Lucene.Net.Index
 		Run one indexer and 2 searchers against single index as
 		stress test.
 		*/
-		public virtual void  RunStressTest(Directory directory, bool autoCommit, MergeScheduler mergeScheduler)
+		public virtual void  RunStressTest(Directory directory, MergeScheduler mergeScheduler)
 		{
-			IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
+		    IndexWriter modifier = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 			
 			modifier.SetMaxBufferedDocs(10);
 			
@@ -204,36 +204,16 @@ namespace Lucene.Net.Index
 		public virtual void  TestStressIndexAndSearching()
 		{
 			RANDOM = NewRandom();
-			
-			// RAMDir
-			Directory directory = new MockRAMDirectory();
-			RunStressTest(directory, true, null);
-			directory.Close();
-			
-			// FSDir
-			System.IO.FileInfo dirPath = _TestUtil.GetTempDir("lucene.test.stress");
-			directory = FSDirectory.Open(dirPath);
-			RunStressTest(directory, true, null);
-			directory.Close();
-			
+
 			// With ConcurrentMergeScheduler, in RAMDir
-			directory = new MockRAMDirectory();
-			RunStressTest(directory, true, new ConcurrentMergeScheduler());
+			Directory directory = new MockRAMDirectory();
+			RunStressTest(directory, new ConcurrentMergeScheduler());
 			directory.Close();
 			
 			// With ConcurrentMergeScheduler, in FSDir
+		    var dirPath = _TestUtil.GetTempDir("lucene.test.stress");
 			directory = FSDirectory.Open(dirPath);
-			RunStressTest(directory, true, new ConcurrentMergeScheduler());
-			directory.Close();
-			
-			// With ConcurrentMergeScheduler and autoCommit=false, in RAMDir
-			directory = new MockRAMDirectory();
-			RunStressTest(directory, false, new ConcurrentMergeScheduler());
-			directory.Close();
-			
-			// With ConcurrentMergeScheduler and autoCommit=false, in FSDir
-			directory = FSDirectory.Open(dirPath);
-			RunStressTest(directory, false, new ConcurrentMergeScheduler());
+			RunStressTest(directory, new ConcurrentMergeScheduler());
 			directory.Close();
 			
 			_TestUtil.RmDir(dirPath);

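The recurring change in these stress-test hunks is the removal of the autoCommit flag: the (Directory, bool autoCommit, Analyzer, bool create) IndexWriter constructors are gone, callers pass an explicit MaxFieldLength, and commit points are created by calling Commit() explicitly. A minimal sketch of the construction pattern the tests now use, limited to members that appear in the diff:

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public static class WriterConstructionSketch
    {
        public static void Run()
        {
            Directory dir = new RAMDirectory();
            // autoCommit is gone; field truncation is an explicit constructor argument.
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                                 IndexWriter.MaxFieldLength.UNLIMITED);
            Document doc = new Document();
            doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Commit();   // commit points are created explicitly now
            writer.Close();
        }
    }
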
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing2.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing2.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing2.cs Wed Nov 16 05:24:27 2011
@@ -16,6 +16,8 @@
  */
 
 using System;
+using System.Collections.Generic;
+using System.Linq;
 using Lucene.Net.Support;
 using NUnit.Framework;
 
@@ -43,7 +45,6 @@ namespace Lucene.Net.Index
 		internal static int maxFields = 4;
 		internal static int bigFieldSize = 10;
 		internal static bool sameFieldOrder = false;
-		internal static bool autoCommit = false;
 		internal static int mergeFactor = 3;
 		internal static int maxBufferedDocs = 3;
 		new internal static int seed = 0;
@@ -66,7 +67,7 @@ namespace Lucene.Net.Index
 				
 			}
 			
-			public MockIndexWriter(TestStressIndexing2 enclosingInstance, Directory dir, bool autoCommit, Analyzer a, bool create):base(dir, autoCommit, a, create)
+			public MockIndexWriter(TestStressIndexing2 enclosingInstance, Directory dir, Analyzer a, bool create, IndexWriter.MaxFieldLength mfl):base(dir, a, create, mfl)
 			{
 				InitBlock(enclosingInstance);
 			}
@@ -127,7 +128,6 @@ namespace Lucene.Net.Index
 			{
 				// increase iterations for better testing
 				sameFieldOrder = r.NextDouble() > 0.5;
-				autoCommit = r.NextDouble() > 0.5;
 				mergeFactor = r.Next(3) + 2;
 				maxBufferedDocs = r.Next(3) + 2;
 				seed++;
@@ -161,7 +161,7 @@ namespace Lucene.Net.Index
 		public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
 		{
 			System.Collections.Hashtable docs = new System.Collections.Hashtable();
-			IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
+			IndexWriter w = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 			w.SetUseCompoundFile(false);
 			
 			/***
@@ -220,7 +220,7 @@ namespace Lucene.Net.Index
 			System.Collections.IDictionary docs = new System.Collections.Hashtable();
 			for (int iter = 0; iter < 3; iter++)
 			{
-				IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
+				IndexWriter w = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 				w.SetUseCompoundFile(false);
 				
 				// force many merges
@@ -280,7 +280,7 @@ namespace Lucene.Net.Index
 			while (iter.MoveNext())
 			{
 				Document d = (Document) iter.Current;
-				System.Collections.ArrayList fields = new System.Collections.ArrayList();
+                var fields = new List<Fieldable>();
 				fields.AddRange(d.GetFields());
 				// put fields in same order each time
                 //{{Lucene.Net-2.9.1}} No, don't change the order of the fields
@@ -301,15 +301,15 @@ namespace Lucene.Net.Index
 		
 		public static void  VerifyEquals(IndexReader r1, Directory dir2, System.String idField)
 		{
-			IndexReader r2 = IndexReader.Open(dir2);
+		    IndexReader r2 = IndexReader.Open(dir2, true);
 			VerifyEquals(r1, r2, idField);
 			r2.Close();
 		}
 		
 		public static void  VerifyEquals(Directory dir1, Directory dir2, System.String idField)
 		{
-			IndexReader r1 = IndexReader.Open(dir1);
-			IndexReader r2 = IndexReader.Open(dir2);
+			IndexReader r1 = IndexReader.Open(dir1, true);
+		    IndexReader r2 = IndexReader.Open(dir2, true);
 			VerifyEquals(r1, r2, idField);
 			r1.Close();
 			r2.Close();
@@ -479,16 +479,16 @@ namespace Lucene.Net.Index
 		
 		public static void  VerifyEquals(Document d1, Document d2)
 		{
-			System.Collections.IList ff1 = d1.GetFields();
-			System.Collections.IList ff2 = d2.GetFields();
-			
-			CollectionsHelper.Sort(ff1, fieldNameComparator);
-			CollectionsHelper.Sort(ff2, fieldNameComparator);
+			var ff1 = d1.GetFields();
+			var ff2 = d2.GetFields();
+
+		    ff1.OrderBy(x => x.Name());
+		    ff2.OrderBy(x => x.Name());
 			
 			if (ff1.Count != ff2.Count)
 			{
-				System.Console.Out.WriteLine(CollectionsHelper.CollectionToString(ff1));
-				System.Console.Out.WriteLine(CollectionsHelper.CollectionToString(ff2));
+                System.Console.Out.WriteLine("[" + String.Join(",", ff1.Select(x => x.ToString())) + "]");
+                System.Console.Out.WriteLine("[" + String.Join(",", ff2.Select(x => x.ToString())) + "]");
 				Assert.AreEqual(ff1.Count, ff2.Count);
 			}
 			
@@ -509,8 +509,8 @@ namespace Lucene.Net.Index
 					if (!s1.Equals(s2))
 					{
 						// print out whole doc on error
-						System.Console.Out.WriteLine(CollectionsHelper.CollectionToString(ff1));
-						System.Console.Out.WriteLine(CollectionsHelper.CollectionToString(ff2));
+                        System.Console.Out.WriteLine("[" + String.Join(",", ff1.Select(x => x.ToString())) + "]");
+                        System.Console.Out.WriteLine("[" + String.Join(",", ff2.Select(x => x.ToString())) + "]");
 						Assert.AreEqual(s1, s2);
 					}
 				}

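One detail worth keeping in mind when reading the VerifyEquals hunk: Enumerable.OrderBy returns a new ordered sequence and leaves its source untouched, so an ordering by field name only takes effect if the result is captured. A minimal sketch under that assumption, using the Fieldable.Name() accessor seen in the diff:

    using System.Collections.Generic;
    using System.Linq;
    using Lucene.Net.Documents;

    public static class FieldOrderSketch
    {
        // OrderBy does not sort in place; materialize the ordered copy and compare those.
        public static List<Fieldable> SortedByName(IEnumerable<Fieldable> fields)
        {
            return fields.OrderBy(f => f.Name()).ToList();
        }
    }
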
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestTermVectorsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestTermVectorsReader.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestTermVectorsReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestTermVectorsReader.cs Wed Nov 16 05:24:27 2011
@@ -63,7 +63,7 @@ namespace Lucene.Net.Index
             InitBlock();
         }
 		
-		internal class TestToken : System.IComparable
+		internal class TestToken : System.IComparable<TestToken>
 		{
 			public TestToken(TestTermVectorsReader enclosingInstance)
 			{
@@ -86,9 +86,9 @@ namespace Lucene.Net.Index
 			internal int pos;
 			internal int startOffset;
 			internal int endOffset;
-			public virtual int CompareTo(System.Object other)
+			public virtual int CompareTo(TestToken other)
 			{
-				return pos - ((TestToken) other).pos;
+				return pos - other.pos;
 			}
 		}
 		
@@ -147,7 +147,7 @@ namespace Lucene.Net.Index
 			//terms
 			for (int j = 0; j < 5; j++)
 				writer.AddDocument(doc);
-			writer.Flush();
+			writer.Commit();
 			seg = writer.NewestSegment().name;
 			writer.Close();
 			
@@ -446,7 +446,7 @@ namespace Lucene.Net.Index
 			}
 			
 			// test setDocumentNumber()
-			IndexReader ir = IndexReader.Open(dir);
+		    IndexReader ir = IndexReader.Open(dir, true);
 			DocNumAwareMapper docNumAwareMapper = new DocNumAwareMapper();
 			Assert.AreEqual(- 1, docNumAwareMapper.GetDocumentNumber());
 			

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestThreadedOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestThreadedOptimize.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestThreadedOptimize.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestThreadedOptimize.cs Wed Nov 16 05:24:27 2011
@@ -107,10 +107,10 @@ namespace Lucene.Net.Index
 			failed = true;
 		}
 		
-		public virtual void  runTest(Directory directory, bool autoCommit, MergeScheduler merger)
+		public virtual void  runTest(Directory directory, MergeScheduler merger)
 		{
 			
-			IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
+			IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.SetMaxBufferedDocs(2);
 			if (merger != null)
 				writer.SetMergeScheduler(merger);
@@ -132,8 +132,6 @@ namespace Lucene.Net.Index
 				writer.SetMergeFactor(4);
 				//writer.setInfoStream(System.out);
 				
-				int docCount = writer.MaxDoc();
-				
 				ThreadClass[] threads = new ThreadClass[NUM_THREADS];
 				
 				for (int i = 0; i < NUM_THREADS; i++)
@@ -157,14 +155,11 @@ namespace Lucene.Net.Index
 				
 				Assert.AreEqual(expectedDocCount, writer.MaxDoc());
 				
-				if (!autoCommit)
-				{
-					writer.Close();
-					writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
-					writer.SetMaxBufferedDocs(2);
-				}
-				
-				IndexReader reader = IndexReader.Open(directory);
+				writer.Close();
+				writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
+				writer.SetMaxBufferedDocs(2);
+
+			    IndexReader reader = IndexReader.Open(directory, true);
 				Assert.IsTrue(reader.IsOptimized());
 				Assert.AreEqual(expectedDocCount, reader.NumDocs());
 				reader.Close();
@@ -180,10 +175,8 @@ namespace Lucene.Net.Index
 		public virtual void  TestThreadedOptimize_Renamed()
 		{
 			Directory directory = new MockRAMDirectory();
-			runTest(directory, false, new SerialMergeScheduler());
-			runTest(directory, true, new SerialMergeScheduler());
-			runTest(directory, false, new ConcurrentMergeScheduler());
-			runTest(directory, true, new ConcurrentMergeScheduler());
+			runTest(directory, new SerialMergeScheduler());
+			runTest(directory, new ConcurrentMergeScheduler());
 			directory.Close();
 			
 			System.String tempDir = AppSettings.Get("tempDir", "");
@@ -191,11 +184,9 @@ namespace Lucene.Net.Index
 				throw new System.IO.IOException("tempDir undefined, cannot run test");
 			
 			System.String dirName = tempDir + "/luceneTestThreadedOptimize";
-			directory = FSDirectory.Open(new System.IO.FileInfo(dirName));
-			runTest(directory, false, new SerialMergeScheduler());
-			runTest(directory, true, new SerialMergeScheduler());
-			runTest(directory, false, new ConcurrentMergeScheduler());
-			runTest(directory, true, new ConcurrentMergeScheduler());
+			directory = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));
+			runTest(directory, new SerialMergeScheduler());
+			runTest(directory, new ConcurrentMergeScheduler());
 			directory.Close();
 			_TestUtil.RmDir(dirName);
 		}

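With autoCommit removed, TestThreadedOptimize now always closes the writer before verifying, and readers are opened with an explicit readOnly flag (IndexReader.Open(directory, true)). A minimal sketch of that verification step, using only calls shown in the hunk:

    using Lucene.Net.Index;
    using Lucene.Net.Store;
    using NUnit.Framework;

    public static class ReadOnlyVerifySketch
    {
        public static void AssertOptimizedDocCount(Directory directory, int expectedDocCount)
        {
            // true = read-only; pass false only when the reader must delete documents.
            IndexReader reader = IndexReader.Open(directory, true);
            try
            {
                Assert.IsTrue(reader.IsOptimized());
                Assert.AreEqual(expectedDocCount, reader.NumDocs());
            }
            finally
            {
                reader.Close();
            }
        }
    }
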
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Lucene.Net.Test.csproj
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Lucene.Net.Test.csproj?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Lucene.Net.Test.csproj (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Lucene.Net.Test.csproj Wed Nov 16 05:24:27 2011
@@ -32,8 +32,7 @@
     <AssemblyKeyContainerName>
     </AssemblyKeyContainerName>
     <AssemblyName>Lucene.Net.Test</AssemblyName>
-    <AssemblyOriginatorKeyFile>
-    </AssemblyOriginatorKeyFile>
+    <AssemblyOriginatorKeyFile>Lucene.Net.snk</AssemblyOriginatorKeyFile>
     <DefaultClientScript>JScript</DefaultClientScript>
     <DefaultHTMLPageLayout>Grid</DefaultHTMLPageLayout>
     <DefaultTargetSchema>IE50</DefaultTargetSchema>
@@ -113,6 +112,9 @@
   <PropertyGroup>
     <DefineConstants>$(DefineConstants);$(ExternalConstants)</DefineConstants>
   </PropertyGroup>
+  <PropertyGroup>
+    <SignAssembly>true</SignAssembly>
+  </PropertyGroup>
   <ItemGroup>
     <Reference Include="ICSharpCode.SharpZipLib, Version=0.85.5.452, Culture=neutral, processorArchitecture=MSIL">
       <SpecificVersion>False</SpecificVersion>
@@ -500,6 +502,7 @@
     <None Include="Index\index.23.nocfs.zip" />
     <None Include="Index\index.24.cfs.zip" />
     <None Include="Index\index.24.nocfs.zip" />
+    <None Include="Lucene.Net.snk" />
   </ItemGroup>
   <ItemGroup>
     <EmbeddedResource Include="Messages\MessagesTestBundle.ja.resources">

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiAnalyzer.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiAnalyzer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiAnalyzer.cs Wed Nov 16 05:24:27 2011
@@ -49,7 +49,7 @@ namespace Lucene.Net.QueryParsers
 		public virtual void  TestMultiAnalyzer_Rename()
 		{
 			
-			QueryParser qp = new QueryParser("", new MultiAnalyzer(this));
+			QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "", new MultiAnalyzer(this));
 			
 			// trivial, no multiple tokens:
 			Assert.AreEqual("foo", qp.Parse("foo").ToString());
@@ -115,7 +115,7 @@ namespace Lucene.Net.QueryParsers
 		[Test]
 		public virtual void  TestPosIncrementAnalyzer()
 		{
-			QueryParser qp = new QueryParser("", new PosIncrementAnalyzer(this));
+			QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "", new PosIncrementAnalyzer(this));
 			Assert.AreEqual("quick brown", qp.Parse("the quick brown").ToString());
 			Assert.AreEqual("\"quick brown\"", qp.Parse("\"the quick brown\"").ToString());
 			Assert.AreEqual("quick brown fox", qp.Parse("the quick brown fox").ToString());
@@ -148,7 +148,7 @@ namespace Lucene.Net.QueryParsers
 			
 			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
-				TokenStream result = new StandardTokenizer(reader);
+				TokenStream result = new StandardTokenizer(Util.Version.LUCENE_CURRENT, reader);
 				result = new TestFilter(enclosingInstance, result);
 				result = new LowerCaseFilter(result);
 				return result;
@@ -255,7 +255,7 @@ namespace Lucene.Net.QueryParsers
 			
 			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
-				TokenStream result = new StandardTokenizer(reader);
+				TokenStream result = new StandardTokenizer(Util.Version.LUCENE_CURRENT, reader);
 				result = new TestPosIncrementFilter(enclosingInstance, result);
 				result = new LowerCaseFilter(result);
 				return result;
@@ -315,7 +315,7 @@ namespace Lucene.Net.QueryParsers
 		private sealed class DumbQueryParser:QueryParser
 		{
 			
-			public DumbQueryParser(System.String f, Analyzer a):base(f, a)
+			public DumbQueryParser(System.String f, Analyzer a):base(Util.Version.LUCENE_CURRENT, f, a)
 			{
 			}
 			

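The QueryParser and StandardTokenizer changes above reflect the 3.0 requirement that an explicit Version be supplied so version-dependent behaviour stays stable. A minimal sketch of the constructor form the tests now use; the field name and analyzer are placeholders:

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;
    using Lucene.Net.Util;

    public static class VersionedParserSketch
    {
        public static Query ParseWithVersion(string queryText)
        {
            // Parsers and analyzers now take a Version as their first argument.
            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field",
                                             new StandardAnalyzer(Version.LUCENE_CURRENT));
            return qp.Parse(queryText);
        }
    }
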
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs Wed Nov 16 05:24:27 2011
@@ -16,7 +16,8 @@
  */
 
 using System;
-
+using System.Collections.Generic;
+using Lucene.Net.Util;
 using NUnit.Framework;
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -39,7 +40,7 @@ namespace Lucene.Net.QueryParsers
 	
 	/// <summary> Tests QueryParser.</summary>
     [TestFixture]
-	public class TestMultiFieldQueryParser:BaseTokenStreamTestCase
+	public class TestMultiFieldQueryParser : LuceneTestCase
 	{
 		
 		/// <summary>test stop words arsing for both the non static form, and for the 
@@ -62,12 +63,12 @@ namespace Lucene.Net.QueryParsers
 			System.String[] fields = new System.String[]{"b", "t"};
 			Occur[] occur = new Occur[]{Occur.SHOULD, Occur.SHOULD};
 			TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer();
-			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, a);
+			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, a);
 			
 			Query q = mfqp.Parse(qtxt);
 			Assert.AreEqual(expectedRes, q.ToString());
 			
-			q = MultiFieldQueryParser.Parse(qtxt, fields, occur, a);
+			q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, qtxt, fields, occur, a);
 			Assert.AreEqual(expectedRes, q.ToString());
 		}
 		
@@ -75,7 +76,7 @@ namespace Lucene.Net.QueryParsers
 		public virtual void  TestSimple()
 		{
 			System.String[] fields = new System.String[]{"b", "t"};
-			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			
 			Query q = mfqp.Parse("one");
 			Assert.AreEqual("b:one t:one", q.ToString());
@@ -135,11 +136,11 @@ namespace Lucene.Net.QueryParsers
 		[Test]
 		public virtual void  TestBoostsSimple()
 		{
-			System.Collections.IDictionary boosts = new System.Collections.Hashtable();
+			IDictionary<string, float> boosts = new Dictionary<string,float>();
 			boosts["b"] = (float) 5;
 			boosts["t"] = (float) 10;
 			System.String[] fields = new System.String[]{"b", "t"};
-			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), boosts);
+            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), boosts);
 			
 			
 			//Check for simple
@@ -167,25 +168,25 @@ namespace Lucene.Net.QueryParsers
 		{
 			System.String[] fields = new System.String[]{"b", "t"};
 			System.String[] queries = new System.String[]{"one", "two"};
-			Query q = MultiFieldQueryParser.Parse(queries, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+            Query q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("b:one t:two", q.ToString());
 			
 			System.String[] queries2 = new System.String[]{"+one", "+two"};
-			q = MultiFieldQueryParser.Parse(queries2, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+            q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries2, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("(+b:one) (+t:two)", q.ToString());
 			
 			System.String[] queries3 = new System.String[]{"one", "+two"};
-			q = MultiFieldQueryParser.Parse(queries3, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+            q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries3, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("b:one (+t:two)", q.ToString());
 			
 			System.String[] queries4 = new System.String[]{"one +more", "+two"};
-			q = MultiFieldQueryParser.Parse(queries4, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+            q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries4, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("(b:one +b:more) (+t:two)", q.ToString());
 			
 			System.String[] queries5 = new System.String[]{"blah"};
 			try
 			{
-				q = MultiFieldQueryParser.Parse(queries5, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+                q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries5, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 				Assert.Fail();
 			}
 			catch (System.ArgumentException e)
@@ -197,11 +198,11 @@ namespace Lucene.Net.QueryParsers
 			TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer();
 			
 			System.String[] queries6 = new System.String[]{"((+stop))", "+((stop))"};
-			q = MultiFieldQueryParser.Parse(queries6, fields, stopA);
+            q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries6, fields, stopA);
 			Assert.AreEqual("", q.ToString());
 			
 			System.String[] queries7 = new System.String[]{"one ((+stop)) +more", "+((stop)) +two"};
-			q = MultiFieldQueryParser.Parse(queries7, fields, stopA);
+            q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries7, fields, stopA);
 			Assert.AreEqual("(b:one +b:more) (+t:two)", q.ToString());
 		}
 		
@@ -210,16 +211,16 @@ namespace Lucene.Net.QueryParsers
 		{
 			System.String[] fields = new System.String[]{"b", "t"};
 			BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-			Query q = MultiFieldQueryParser.Parse("one", fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+			Query q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one", fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("+b:one -t:one", q.ToString());
 			
-			q = MultiFieldQueryParser.Parse("one two", fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+			q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one two", fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());
 			
 			try
 			{
 				BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
-				q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+				q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "blah", fields, flags2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 				Assert.Fail();
 			}
 			catch (System.ArgumentException e)
@@ -234,18 +235,18 @@ namespace Lucene.Net.QueryParsers
 			System.String[] fields = new System.String[]{"b", "t"};
 			//int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
 			BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-			MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+            MultiFieldQueryParser parser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			
-			Query q = MultiFieldQueryParser.Parse("one", fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT)); //, fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+			Query q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one", fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT)); //, fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("+b:one -t:one", q.ToString());
 			
-			q = MultiFieldQueryParser.Parse("one two", fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+			q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one two", fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());
 			
 			try
 			{
 				BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
-				q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+				q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "blah", fields, flags2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 				Assert.Fail();
 			}
 			catch (System.ArgumentException e)
@@ -260,13 +261,13 @@ namespace Lucene.Net.QueryParsers
 			System.String[] queries = new System.String[]{"one", "two", "three"};
 			System.String[] fields = new System.String[]{"f1", "f2", "f3"};
 			BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD};
-			Query q = MultiFieldQueryParser.Parse(queries, fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+            Query q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("+f1:one -f2:two f3:three", q.ToString());
 			
 			try
 			{
 				BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
-				q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+                q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, flags2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 				Assert.Fail();
 			}
 			catch (System.ArgumentException e)
@@ -281,13 +282,13 @@ namespace Lucene.Net.QueryParsers
 			System.String[] queries = new System.String[]{"one", "two"};
 			System.String[] fields = new System.String[]{"b", "t"};
 			BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-			Query q = MultiFieldQueryParser.Parse(queries, fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+            Query q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Assert.AreEqual("+b:one -t:two", q.ToString());
 			
 			try
 			{
 				BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
-				q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+                q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, flags2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 				Assert.Fail();
 			}
 			catch (System.ArgumentException e)
@@ -300,7 +301,7 @@ namespace Lucene.Net.QueryParsers
 		public virtual void  TestAnalyzerReturningNull()
 		{
 			System.String[] fields = new System.String[]{"f1", "f2", "f3"};
-			MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new AnalyzerReturningNull());
+            MultiFieldQueryParser parser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new AnalyzerReturningNull());
 			Query q = parser.Parse("bla AND blo");
 			Assert.AreEqual("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.ToString());
 			// the following queries are not affected as their terms are not analyzed anyway:
@@ -323,10 +324,10 @@ namespace Lucene.Net.QueryParsers
 			iw.AddDocument(doc);
 			iw.Close();
 			
-			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(new System.String[]{"body"}, analyzer);
+			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, new []{"body"}, analyzer);
 			mfqp.SetDefaultOperator(QueryParser.Operator.AND);
 			Query q = mfqp.Parse("the footest");
-			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
+			IndexSearcher is_Renamed = new IndexSearcher(ramDir, true);
 			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).ScoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			is_Renamed.Close();

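In the MultiFieldQueryParser hunks the boosts map becomes a typed IDictionary<string, float> and every entry point takes a Version first. A minimal sketch of a boosted multi-field parse, using the same constructor that appears in the diff:

    using System.Collections.Generic;
    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;
    using Lucene.Net.Util;

    public static class BoostedMultiFieldSketch
    {
        public static Query ParseBoosted(string queryText)
        {
            var boosts = new Dictionary<string, float> { { "b", 5f }, { "t", 10f } };
            string[] fields = { "b", "t" };
            var mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields,
                                                 new StandardAnalyzer(Version.LUCENE_CURRENT),
                                                 boosts);
            return mfqp.Parse(queryText);   // each field clause carries its configured boost
        }
    }
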
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestQueryParser.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestQueryParser.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestQueryParser.cs Wed Nov 16 05:24:27 2011
@@ -16,6 +16,7 @@
  */
 
 using System;
+using System.Collections.Generic;
 using Lucene.Net.Support;
 using NUnit.Framework;
 
@@ -90,7 +91,8 @@ namespace Lucene.Net.QueryParsers
 				}
 				
 			}
-			internal AnonymousClassQueryParser(int[] type, TestQueryParser enclosingInstance, System.String Param1, Lucene.Net.Analysis.Analyzer Param2):base(Param1, Param2)
+            internal AnonymousClassQueryParser(int[] type, TestQueryParser enclosingInstance, System.String Param1, Lucene.Net.Analysis.Analyzer Param2)
+                : base(Version.LUCENE_CURRENT, Param1, Param2)
 			{
 				InitBlock(type, enclosingInstance);
 			}
@@ -113,14 +115,14 @@ namespace Lucene.Net.QueryParsers
 				return base.GetFieldQuery(field, queryText);
 			}
 		}
-		
-		/*public TestQueryParser(System.String name):base(name, dataTestWithDifferentLocals)
-		{
-		}*/
+
+        public TestQueryParser(System.String name):base(name, dataTestWithDifferentLocals) // TODO: was commented out
+        {
+        }
 		
 		public static Analyzer qpAnalyzer = new QPTestAnalyzer();
 		
-		public class QPTestFilter:TokenFilter
+		public class QPTestFilter : TokenFilter
 		{
 			internal TermAttribute termAtt;
 			internal OffsetAttribute offsetAtt;
@@ -179,7 +181,7 @@ namespace Lucene.Net.QueryParsers
 		
 		public class QPTestParser:QueryParser
 		{
-			public QPTestParser(System.String f, Analyzer a):base(f, a)
+			public QPTestParser(System.String f, Analyzer a):base(Version.LUCENE_CURRENT, f, a)
 			{
 			}
 			
@@ -207,7 +209,7 @@ namespace Lucene.Net.QueryParsers
 		{
 			if (a == null)
 				a = new SimpleAnalyzer();
-			QueryParser qp = new QueryParser("field", a);
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", a);
 			qp.SetDefaultOperator(QueryParser.OR_OPERATOR);
 			return qp;
 		}
@@ -279,7 +281,7 @@ namespace Lucene.Net.QueryParsers
 		{
 			if (a == null)
 				a = new SimpleAnalyzer();
-			QueryParser qp = new QueryParser("field", a);
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", a);
 			qp.SetDefaultOperator(QueryParser.AND_OPERATOR);
 			return qp.Parse(query);
 		}
@@ -364,8 +366,8 @@ namespace Lucene.Net.QueryParsers
 			AssertQueryEquals("((a OR b) AND NOT c) OR d", null, "(+(a b) -c) d");
 			AssertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null, "+(apple \"steve jobs\") -(foo bar baz)");
 			AssertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null, "+(title:dog title:cat) -author:\"bob dole\"");
-			
-			QueryParser qp = new QueryParser("field", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			// make sure OR is the default:
 			Assert.AreEqual(QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
 			qp.SetDefaultOperator(QueryParser.AND_OPERATOR);
@@ -537,8 +539,8 @@ namespace Lucene.Net.QueryParsers
 		{
 			AssertQueryEquals("[ a TO z]", null, "[a TO z]");
 			Assert.AreEqual(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery) GetQuery("[ a TO z]", null)).GetRewriteMethod());
-			
-			QueryParser qp = new QueryParser("field", new SimpleAnalyzer());
+
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer());
 			qp.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
 			Assert.AreEqual(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery) qp.Parse("[ a TO z]")).GetRewriteMethod());
 			
@@ -559,12 +561,12 @@ namespace Lucene.Net.QueryParsers
 			RAMDirectory ramDir = new RAMDirectory();
 			IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document doc = new Document();
-			doc.Add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES, Field.Index.NOT_ANALYZED));
 			iw.AddDocument(doc);
 			iw.Close();
-			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
-			
-			QueryParser qp = new QueryParser("content", new WhitespaceAnalyzer());
+		    IndexSearcher is_Renamed = new IndexSearcher(ramDir, true);
+
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "content", new WhitespaceAnalyzer());
 			
 			// Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
 			// RuleBasedCollator.  However, the Arabic Locale seems to order the Farsi
@@ -672,7 +674,7 @@ namespace Lucene.Net.QueryParsers
 			System.String defaultField = "default";
 			System.String monthField = "month";
 			System.String hourField = "hour";
-			QueryParser qp = new QueryParser("field", new SimpleAnalyzer());
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer());
 			
 			// Don't set any date resolution and verify if DateField is used
 			System.DateTime tempAux = endDateExpected;
@@ -882,10 +884,10 @@ namespace Lucene.Net.QueryParsers
 		[Test]
 		public virtual void  TestBoost()
 		{
-			System.Collections.Hashtable stopWords = new System.Collections.Hashtable(1);
-			CollectionsHelper.AddIfNotContains(stopWords, "on");
-			StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(stopWords);
-			QueryParser qp = new QueryParser("field", oneStopAnalyzer);
+			HashSet<string> stopWords = new HashSet<string>();
+		    stopWords.Add("on");
+            StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(Version.LUCENE_CURRENT, stopWords);
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", oneStopAnalyzer);
 			Query q = qp.Parse("on^1.0");
 			Assert.IsNotNull(q);
 			q = qp.Parse("\"hello\"^2.0");
@@ -896,8 +898,8 @@ namespace Lucene.Net.QueryParsers
 			Assert.AreEqual(q.GetBoost(), (float) 2.0, (float) 0.5);
 			q = qp.Parse("\"on\"^1.0");
 			Assert.IsNotNull(q);
-			
-			QueryParser qp2 = new QueryParser("field", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+
+            QueryParser qp2 = new QueryParser(Version.LUCENE_CURRENT, "field", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			q = qp2.Parse("the^3");
 			// "the" is a stop word so the result is an empty query:
 			Assert.IsNotNull(q);
@@ -964,7 +966,7 @@ namespace Lucene.Net.QueryParsers
 			BooleanQuery.SetMaxClauseCount(2);
 			try
 			{
-				QueryParser qp = new QueryParser("field", new WhitespaceAnalyzer());
+                QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer());
 				qp.Parse("one two three");
 				Assert.Fail("ParseException expected due to too many boolean clauses");
 			}
@@ -978,7 +980,7 @@ namespace Lucene.Net.QueryParsers
 		[Test]
 		public virtual void  TestPrecedence()
 		{
-			QueryParser qp = new QueryParser("field", new WhitespaceAnalyzer());
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer());
 			Query query1 = qp.Parse("A AND B OR C AND D");
 			Query query2 = qp.Parse("+A +B +C +D");
 			Assert.AreEqual(query1, query2);
@@ -992,7 +994,7 @@ namespace Lucene.Net.QueryParsers
 			AddDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
 			AddDateDoc("b", 2005, 12, 4, 22, 15, 0, iw);
 			iw.Close();
-			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
+		    IndexSearcher is_Renamed = new IndexSearcher(ramDir, true);
 			AssertHits(1, "[12/1/2005 TO 12/3/2005]", is_Renamed);
 			AssertHits(2, "[12/1/2005 TO 12/4/2005]", is_Renamed);
 			AssertHits(1, "[12/3/2005 TO 12/4/2005]", is_Renamed);
@@ -1047,7 +1049,9 @@ namespace Lucene.Net.QueryParsers
 		[Test]
 		public virtual void  TestStopwords()
 		{
-			QueryParser qp = new QueryParser("a", new StopAnalyzer(new System.String[]{"the", "foo"}));
+		    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "a",
+		                                     new StopAnalyzer(Version.LUCENE_CURRENT,
+		                                                      StopFilter.MakeStopSet(new[] {"the", "foo"})));
 			Query result = qp.Parse("a:the OR a:foo");
 			Assert.IsNotNull(result, "result is null and it shouldn't be");
 			Assert.IsTrue(result is BooleanQuery, "result is not a BooleanQuery");
@@ -1061,40 +1065,33 @@ namespace Lucene.Net.QueryParsers
 			System.Console.Out.WriteLine("Result: " + result);
 			Assert.IsTrue(((BooleanQuery) result).Clauses().Count == 2, ((BooleanQuery) result).Clauses().Count + " does not equal: " + 2);
 		}
-		
-		[Test]
-		public virtual void  TestPositionIncrement()
-		{
-			bool dflt = StopFilter.GetEnablePositionIncrementsDefault();
-			StopFilter.SetEnablePositionIncrementsDefault(true);
-			try
-			{
-				QueryParser qp = new QueryParser("a", new StopAnalyzer(new System.String[]{"the", "in", "are", "this"}));
-				qp.SetEnablePositionIncrements(true);
-				System.String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
-				//               0         2                      5           7  8
-				int[] expectedPositions = new int[]{1, 3, 4, 6, 9};
-				PhraseQuery pq = (PhraseQuery) qp.Parse(qtxt);
-				//System.out.println("Query text: "+qtxt);
-				//System.out.println("Result: "+pq);
-				Term[] t = pq.GetTerms();
-				int[] pos = pq.GetPositions();
-				for (int i = 0; i < t.Length; i++)
-				{
-					//System.out.println(i+". "+t[i]+"  pos: "+pos[i]);
-					Assert.AreEqual(expectedPositions[i], pos[i], "term " + i + " = " + t[i] + " has wrong term-position!");
-				}
-			}
-			finally
-			{
-				StopFilter.SetEnablePositionIncrementsDefault(dflt);
-			}
-		}
-		
-		[Test]
+
+        [Test]
+        public virtual void TestPositionIncrement()
+        {
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "a",
+                                             new StopAnalyzer(Version.LUCENE_CURRENT,
+                                                              StopFilter.MakeStopSet(new[] {"the", "in", "are", "this"})));
+            qp.SetEnablePositionIncrements(true);
+            System.String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
+            //               0         2                      5           7  8
+            int[] expectedPositions = new int[] {1, 3, 4, 6, 9};
+            PhraseQuery pq = (PhraseQuery) qp.Parse(qtxt);
+            //System.out.println("Query text: "+qtxt);
+            //System.out.println("Result: "+pq);
+            Term[] t = pq.GetTerms();
+            int[] pos = pq.GetPositions();
+            for (int i = 0; i < t.Length; i++)
+            {
+                //System.out.println(i+". "+t[i]+"  pos: "+pos[i]);
+                Assert.AreEqual(expectedPositions[i], pos[i], "term " + i + " = " + t[i] + " has wrong term-position!");
+            }
+        }
+
+	    [Test]
 		public virtual void  TestMatchAllDocs()
 		{
-			QueryParser qp = new QueryParser("field", new WhitespaceAnalyzer());
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer());
 			Assert.AreEqual(new MatchAllDocsQuery(), qp.Parse("*:*"));
 			Assert.AreEqual(new MatchAllDocsQuery(), qp.Parse("(*:*)"));
 			BooleanQuery bq = (BooleanQuery) qp.Parse("+*:* -*:*");
@@ -1104,7 +1101,7 @@ namespace Lucene.Net.QueryParsers
 		
 		private void  AssertHits(int expected, System.String query, IndexSearcher is_Renamed)
 		{
-			QueryParser qp = new QueryParser("date", new WhitespaceAnalyzer());
+            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "date", new WhitespaceAnalyzer());
 			qp.SetLocale(new System.Globalization.CultureInfo("en-US"));
 			Query q = qp.Parse(query);
 			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).ScoreDocs;

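Stop-word handling in TestQueryParser also moves to the Version-aware constructors: the string-array StopAnalyzer constructor is replaced by one taking a Version plus a stop set built with StopFilter.MakeStopSet, and position increments are enabled per parser instead of through the removed static default. A minimal sketch of the new form, using only calls shown in the hunks:

    using Lucene.Net.Analysis;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;
    using Lucene.Net.Util;

    public static class StopWordParserSketch
    {
        public static Query ParseWithStopWords(string queryText)
        {
            var stopWords = StopFilter.MakeStopSet(new[] { "the", "in", "are", "this" });
            var analyzer = new StopAnalyzer(Version.LUCENE_CURRENT, stopWords);
            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "a", analyzer);
            qp.SetEnablePositionIncrements(true);   // replaces the removed static default
            return qp.Parse(queryText);
        }
    }
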
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMatchAllDocsQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMatchAllDocsQuery.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMatchAllDocsQuery.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMatchAllDocsQuery.cs Wed Nov 16 05:24:27 2011
@@ -61,8 +61,8 @@ namespace Lucene.Net.Search
 			AddDoc("two", iw, 20f);
 			AddDoc("three four", iw, 300f);
 			iw.Close();
-			
-			IndexReader ir = IndexReader.Open(dir);
+
+		    IndexReader ir = IndexReader.Open(dir, false);
 			IndexSearcher is_Renamed = new IndexSearcher(ir);
 			ScoreDoc[] hits;
 			
@@ -114,7 +114,7 @@ namespace Lucene.Net.Search
 			Assert.AreEqual(2, hits.Length);
 			
 			// test parsable toString()
-			QueryParser qp = new QueryParser("key", analyzer);
+			QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "key", analyzer);
 			hits = is_Renamed.Search(qp.Parse(new MatchAllDocsQuery().ToString()), null, 1000).ScoreDocs;
 			Assert.AreEqual(2, hits.Length);
 			

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiPhraseQuery.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiPhraseQuery.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiPhraseQuery.cs Wed Nov 16 05:24:27 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using NUnit.Framework;
 
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
@@ -33,12 +33,7 @@ using LuceneTestCase = Lucene.Net.Util.L
 namespace Lucene.Net.Search
 {
 	
-	/// <summary> This class tests the MultiPhraseQuery class.
-	/// 
-	/// 
-	/// </summary>
-	/// <version>  $Id: TestMultiPhraseQuery.java 794078 2009-07-14 21:39:22Z markrmiller $
-	/// </version>
+	/// <summary>This class tests the MultiPhraseQuery class.</summary>
     [TestFixture]
 	public class TestMultiPhraseQuery:LuceneTestCase
 	{
@@ -57,8 +52,8 @@ namespace Lucene.Net.Search
 			Add("piccadilly circus", writer);
 			writer.Optimize();
 			writer.Close();
-			
-			IndexSearcher searcher = new IndexSearcher(indexStore);
+
+		    IndexSearcher searcher = new IndexSearcher(indexStore, true);
 			
 			// search for "blueberry pi*":
 			MultiPhraseQuery query1 = new MultiPhraseQuery();
@@ -68,7 +63,7 @@ namespace Lucene.Net.Search
 			query2.Add(new Term("body", "strawberry"));
 			
 			System.Collections.ArrayList termsWithPrefix = new System.Collections.ArrayList();
-			IndexReader ir = IndexReader.Open(indexStore);
+		    IndexReader ir = IndexReader.Open(indexStore, true);
 			
 			// this TermEnum gives "piccadilly", "pie" and "pizza".
 			System.String prefix = "pi";
@@ -156,8 +151,8 @@ namespace Lucene.Net.Search
 			Add("blue raspberry pie", writer);
 			writer.Optimize();
 			writer.Close();
-			
-			IndexSearcher searcher = new IndexSearcher(indexStore);
+
+		    IndexSearcher searcher = new IndexSearcher(indexStore, true);
 			// This query will be equivalent to +body:pie +body:"blue*"
 			BooleanQuery q = new BooleanQuery();
 			q.Add(new TermQuery(new Term("body", "pie")), BooleanClause.Occur.MUST);
@@ -177,12 +172,12 @@ namespace Lucene.Net.Search
 		public virtual void  TestPhrasePrefixWithBooleanQuery()
 		{
 			RAMDirectory indexStore = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new System.Collections.Hashtable(0)), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(Util.Version.LUCENE_CURRENT, new HashSet<string>()), true, IndexWriter.MaxFieldLength.LIMITED);
 			Add("This is a test", "object", writer);
 			Add("a note", "note", writer);
 			writer.Close();
-			
-			IndexSearcher searcher = new IndexSearcher(indexStore);
+
+		    IndexSearcher searcher = new IndexSearcher(indexStore, true);
 			
 			// This query will be equivalent to +type:note +body:"a t*"
 			BooleanQuery q = new BooleanQuery();

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs Wed Nov 16 05:24:27 2011
@@ -36,11 +36,7 @@ using LuceneTestCase = Lucene.Net.Util.L
 namespace Lucene.Net.Search
 {
 	
-	/// <summary> Tests {@link MultiSearcher} class.
-	/// 
-	/// </summary>
-	/// <version>  $Id: TestMultiSearcher.java 781130 2009-06-02 19:16:20Z mikemccand $
-	/// </version>
+	/// <summary>Tests {@link MultiSearcher} class.</summary>
     [TestFixture]
 	public class TestMultiSearcher:LuceneTestCase
 	{
@@ -144,14 +140,14 @@ namespace Lucene.Net.Search
 			writerB.Close();
 			
 			// creating the query
-			QueryParser parser = new QueryParser("fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+			QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Query query = parser.Parse("handle:1");
 			
 			// building the searchables
 			Searcher[] searchers = new Searcher[2];
 			// VITAL STEP:adding the searcher for the empty index first, before the searcher for the populated index
-			searchers[0] = new IndexSearcher(indexStoreB);
-			searchers[1] = new IndexSearcher(indexStoreA);
+			searchers[0] = new IndexSearcher(indexStoreB, true);
+			searchers[1] = new IndexSearcher(indexStoreA, true);
 			// creating the multiSearcher
 			Searcher mSearcher = GetMultiSearcherInstance(searchers);
 			// performing the search
@@ -180,8 +176,8 @@ namespace Lucene.Net.Search
 			// building the searchables
 			Searcher[] searchers2 = new Searcher[2];
 			// VITAL STEP:adding the searcher for the empty index first, before the searcher for the populated index
-			searchers2[0] = new IndexSearcher(indexStoreB);
-			searchers2[1] = new IndexSearcher(indexStoreA);
+			searchers2[0] = new IndexSearcher(indexStoreB, true);
+			searchers2[1] = new IndexSearcher(indexStoreA, true);
 			// creating the mulitSearcher
 			MultiSearcher mSearcher2 = GetMultiSearcherInstance(searchers2);
 			// performing the same search
@@ -214,7 +210,7 @@ namespace Lucene.Net.Search
 			
 			// deleting the document just added, this will cause a different exception to take place
 			Term term = new Term("id", "doc1");
-			IndexReader readerB = IndexReader.Open(indexStoreB);
+		    IndexReader readerB = IndexReader.Open(indexStoreB, false);
 			readerB.DeleteDocuments(term);
 			readerB.Close();
 			
@@ -226,8 +222,8 @@ namespace Lucene.Net.Search
 			// building the searchables
 			Searcher[] searchers3 = new Searcher[2];
 			
-			searchers3[0] = new IndexSearcher(indexStoreB);
-			searchers3[1] = new IndexSearcher(indexStoreA);
+			searchers3[0] = new IndexSearcher(indexStoreB, true);
+			searchers3[1] = new IndexSearcher(indexStoreA, true);
 			// creating the multiSearcher
 			Searcher mSearcher3 = GetMultiSearcherInstance(searchers3);
 			// performing the same search
@@ -295,8 +291,8 @@ namespace Lucene.Net.Search
 			InitIndex(ramDirectory1, 10, true, null); // documents with a single token "doc0", "doc1", etc...
 			InitIndex(ramDirectory2, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
 			
-			indexSearcher1 = new IndexSearcher(ramDirectory1);
-			indexSearcher2 = new IndexSearcher(ramDirectory2);
+			indexSearcher1 = new IndexSearcher(ramDirectory1, true);
+			indexSearcher2 = new IndexSearcher(ramDirectory2, true);
 			
 			MultiSearcher searcher = GetMultiSearcherInstance(new Searcher[]{indexSearcher1, indexSearcher2});
 			Assert.IsTrue(searcher != null, "searcher is null and it shouldn't be");
@@ -353,8 +349,8 @@ namespace Lucene.Net.Search
 			// First put the documents in the same index
 			InitIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
 			InitIndex(ramDirectory1, nDocs, false, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
-			
-			indexSearcher1 = new IndexSearcher(ramDirectory1);
+
+		    indexSearcher1 = new IndexSearcher(ramDirectory1, true);
 			indexSearcher1.SetDefaultFieldSortScoring(true, true);
 			
 			hits = indexSearcher1.Search(query, null, 1000).ScoreDocs;
@@ -382,9 +378,9 @@ namespace Lucene.Net.Search
 			InitIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
 			InitIndex(ramDirectory2, nDocs, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
 			
-			indexSearcher1 = new IndexSearcher(ramDirectory1);
+			indexSearcher1 = new IndexSearcher(ramDirectory1, true);
 			indexSearcher1.SetDefaultFieldSortScoring(true, true);
-			indexSearcher2 = new IndexSearcher(ramDirectory2);
+			indexSearcher2 = new IndexSearcher(ramDirectory2, true);
 			indexSearcher2.SetDefaultFieldSortScoring(true, true);
 			
 			Searcher searcher = GetMultiSearcherInstance(new Searcher[]{indexSearcher1, indexSearcher2});
@@ -420,7 +416,7 @@ namespace Lucene.Net.Search
 		{
 			RAMDirectory dir = new RAMDirectory();
 			InitIndex(dir, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
-			IndexSearcher srchr = new IndexSearcher(dir);
+		    IndexSearcher srchr = new IndexSearcher(dir, true);
 			MultiSearcher msrchr = GetMultiSearcherInstance(new Searcher[]{srchr});
 			
 			Similarity customSimilarity = new AnonymousClassDefaultSimilarity(this);
@@ -440,7 +436,22 @@ namespace Lucene.Net.Search
 			
 			// The scores from the IndexSearcher and MultiSearcher should be the same
 			// if the same similarity is used.
-			Assert.AreEqual(score1, scoreN, 1e-6, "MultiSearcher score must be equal to single esrcher score!");
+			Assert.AreEqual(score1, scoreN, 1e-6, "MultiSearcher score must be equal to single searcher score!");
 		}
+
+        public void TestDocFreq()
+        {
+            RAMDirectory dir1 = new RAMDirectory();
+            RAMDirectory dir2 = new RAMDirectory();
+
+            InitIndex(dir1, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
+            InitIndex(dir2, 5, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
+            IndexSearcher searcher1 = new IndexSearcher(dir1, true);
+            IndexSearcher searcher2 = new IndexSearcher(dir2, true);
+
+            MultiSearcher multiSearcher = GetMultiSearcherInstance(new Searcher[] { searcher1, searcher2 });
+            Assert.AreEqual(15, multiSearcher.DocFreq(new Term("contents", "x")));
+
+        }
 	}
 }
\ No newline at end of file

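The recurring change in TestMultiSearcher.cs above is mechanical: every deprecated single-argument IndexSearcher constructor and IndexReader.Open call gains an explicit readOnly flag. A minimal sketch of the post-migration pattern, assuming a hypothetical "body" field and a throwaway RAMDirectory that are not part of this commit, could look like this:

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using Lucene.Net.Store;
    using Lucene.Net.Util;

    public static class ReadOnlySearcherExample
    {
        public static int CountHits()
        {
            RAMDirectory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Version.LUCENE_CURRENT),
                                                 true, IndexWriter.MaxFieldLength.LIMITED);
            Document doc = new Document();
            doc.Add(new Field("body", "hello world", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Close();

            // readOnly = true: the searcher opens the index without taking a write lock,
            // mirroring the new IndexSearcher(dir, true) calls introduced above.
            IndexSearcher searcher = new IndexSearcher(dir, true);
            int hits = searcher.Search(new TermQuery(new Term("body", "hello")), null, 10).ScoreDocs.Length;
            searcher.Close();
            dir.Close();
            return hits;
        }
    }
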
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs Wed Nov 16 05:24:27 2011
@@ -34,10 +34,7 @@ namespace Lucene.Net.Search
 	
 	/// <summary> Tests {@link MultiSearcher} ranking, i.e. makes sure this bug is fixed:
 	/// http://issues.apache.org/bugzilla/show_bug.cgi?id=31841
-	/// 
 	/// </summary>
-	/// <version>  $Id: TestMultiSearcher.java 150492 2004-09-06 22:01:49Z dnaber $
-	/// </version>
     [TestFixture]
 	public class TestMultiSearcherRanking:LuceneTestCase
 	{
@@ -109,7 +106,7 @@ namespace Lucene.Net.Search
 			// check result hit ranking
 			if (verbose)
 				System.Console.Out.WriteLine("Query: " + queryStr);
-			QueryParser queryParser = new QueryParser(FIELD_NAME, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+			QueryParser queryParser = new QueryParser(Util.Version.LUCENE_CURRENT, FIELD_NAME, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Query query = queryParser.Parse(queryStr);
 			ScoreDoc[] multiSearcherHits = multiSearcher.Search(query, null, 1000).ScoreDocs;
 			ScoreDoc[] singleSearcherHits = singleSearcher.Search(query, null, 1000).ScoreDocs;
@@ -145,8 +142,8 @@ namespace Lucene.Net.Search
 			iw2.Close();
 			
 			Searchable[] s = new Searchable[2];
-			s[0] = new IndexSearcher(d1);
-			s[1] = new IndexSearcher(d2);
+			s[0] = new IndexSearcher(d1, true);
+            s[1] = new IndexSearcher(d2, true);
 			multiSearcher = new MultiSearcher(s);
 			
 			// create IndexSearcher which contains all documents
@@ -155,7 +152,7 @@ namespace Lucene.Net.Search
 			AddCollection1(iw);
 			AddCollection2(iw);
 			iw.Close();
-			singleSearcher = new IndexSearcher(d);
+            singleSearcher = new IndexSearcher(d, true);
 		}
 		
 		private void  AddCollection1(IndexWriter iw)

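The QueryParser and StandardAnalyzer changes in TestMultiSearcherRanking.cs follow the other recurring pattern in this commit: both constructors now take an explicit Util.Version, so parsing and tokenization are pinned to a release instead of drifting with the library. A hedged sketch of the updated construction (the "fulltext" field and "handle:1" query mirror the tests above; the helper class name is illustrative):

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;
    using Lucene.Net.Util;

    public static class VersionedParserExample
    {
        public static Query BuildQuery()
        {
            // Analyzer and parser are pinned to the same Version, matching the
            // two-argument-to-three-argument constructor change in this commit.
            StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
            QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fulltext", analyzer);
            return parser.Parse("handle:1");
        }
    }
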
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiTermConstantScore.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiTermConstantScore.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiTermConstantScore.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiTermConstantScore.cs Wed Nov 16 05:24:27 2011
@@ -189,8 +189,8 @@ namespace Lucene.Net.Search
 		public virtual void  TestEqualScores()
 		{
 			// NOTE: uses index built in *this* setUp
-			
-			IndexReader reader = IndexReader.Open(small);
+
+            IndexReader reader = IndexReader.Open(small, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			ScoreDoc[] result;
@@ -219,8 +219,8 @@ namespace Lucene.Net.Search
 		public virtual void  TestBoost()
 		{
 			// NOTE: uses index built in *this* setUp
-			
-			IndexReader reader = IndexReader.Open(small);
+
+            IndexReader reader = IndexReader.Open(small, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			// test for correct application of query normalization
@@ -274,8 +274,8 @@ namespace Lucene.Net.Search
 		public virtual void  TestBooleanOrderUnAffected()
 		{
 			// NOTE: uses index built in *this* setUp
-			
-			IndexReader reader = IndexReader.Open(small);
+
+            IndexReader reader = IndexReader.Open(small, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			// first do a regular TermRangeQuery which uses term expansion so
@@ -306,8 +306,8 @@ namespace Lucene.Net.Search
 		public virtual void  TestRangeQueryId()
 		{
 			// NOTE: uses index built in *super* setUp
-			
-			IndexReader reader = IndexReader.Open(signedIndex.index);
+
+            IndexReader reader = IndexReader.Open(signedIndex.index, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			int medId = ((maxId - minId) / 2);
@@ -435,8 +435,8 @@ namespace Lucene.Net.Search
 		public virtual void  TestRangeQueryIdCollating()
 		{
 			// NOTE: uses index built in *super* setUp
-			
-			IndexReader reader = IndexReader.Open(signedIndex.index);
+
+            IndexReader reader = IndexReader.Open(signedIndex.index, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			int medId = ((maxId - minId) / 2);
@@ -520,8 +520,8 @@ namespace Lucene.Net.Search
 		public virtual void  TestRangeQueryRand()
 		{
 			// NOTE: uses index built in *super* setUp
-			
-			IndexReader reader = IndexReader.Open(signedIndex.index);
+
+            IndexReader reader = IndexReader.Open(signedIndex.index, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			System.String minRP = Pad(signedIndex.minR);
@@ -585,7 +585,7 @@ namespace Lucene.Net.Search
 			// NOTE: uses index built in *super* setUp
 			
 			// using the unsigned index because collation seems to ignore hyphens
-			IndexReader reader = IndexReader.Open(unsignedIndex.index);
+            IndexReader reader = IndexReader.Open(unsignedIndex.index, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			System.String minRP = Pad(unsignedIndex.minR);
@@ -659,8 +659,8 @@ namespace Lucene.Net.Search
 			
 			writer.Optimize();
 			writer.Close();
-			
-			IndexReader reader = IndexReader.Open(farsiIndex);
+
+            IndexReader reader = IndexReader.Open(farsiIndex, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			// Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
@@ -695,14 +695,14 @@ namespace Lucene.Net.Search
 			for (int docnum = 0; docnum < words.Length; ++docnum)
 			{
 				Document doc = new Document();
-				doc.Add(new Field("content", words[docnum], Field.Store.YES, Field.Index.UN_TOKENIZED));
-				doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.UN_TOKENIZED));
+				doc.Add(new Field("content", words[docnum], Field.Store.YES, Field.Index.NOT_ANALYZED));
+                doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.NOT_ANALYZED));
 				writer.AddDocument(doc);
 			}
 			writer.Optimize();
 			writer.Close();
-			
-			IndexReader reader = IndexReader.Open(danishIndex);
+
+            IndexReader reader = IndexReader.Open(danishIndex, true);
 			IndexSearcher search = new IndexSearcher(reader);
 			
 			System.Globalization.CompareInfo c = new System.Globalization.CultureInfo("da" + "-" + "dk").CompareInfo;

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestNot.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestNot.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestNot.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestNot.cs Wed Nov 16 05:24:27 2011
@@ -30,12 +30,7 @@ using LuceneTestCase = Lucene.Net.Util.L
 namespace Lucene.Net.Search
 {
 	
-	/// <summary>Similarity unit test.
-	/// 
-	/// 
-	/// </summary>
-	/// <version>  $Revision: 694004 $
-	/// </version>
+	/// <summary>Tests the NOT query operator.</summary>
     [TestFixture]
 	public class TestNot:LuceneTestCase
 	{		
@@ -52,8 +47,8 @@ namespace Lucene.Net.Search
 			writer.Optimize();
 			writer.Close();
 			
-			Searcher searcher = new IndexSearcher(store);
-			QueryParser parser = new QueryParser("field", new SimpleAnalyzer());
+			Searcher searcher = new IndexSearcher(store, true);
+			QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "field", new SimpleAnalyzer());
 			Query query = parser.Parse("a NOT b");
 			//System.out.println(query);
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPhraseQuery.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPhraseQuery.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPhraseQuery.cs Wed Nov 16 05:24:27 2011
@@ -102,8 +102,8 @@ namespace Lucene.Net.Search
 			
 			writer.Optimize();
 			writer.Close();
-			
-			searcher = new IndexSearcher(directory);
+
+		    searcher = new IndexSearcher(directory, true);
 			query = new PhraseQuery();
 		}
 		
@@ -236,14 +236,14 @@ namespace Lucene.Net.Search
 		public virtual void  TestPhraseQueryWithStopAnalyzer()
 		{
 			RAMDirectory directory = new RAMDirectory();
-			StopAnalyzer stopAnalyzer = new StopAnalyzer();
+			StopAnalyzer stopAnalyzer = new StopAnalyzer(Util.Version.LUCENE_24);
 			IndexWriter writer = new IndexWriter(directory, stopAnalyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			Document doc = new Document();
 			doc.Add(new Field("field", "the stop words are here", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			writer.Close();
-			
-			IndexSearcher searcher = new IndexSearcher(directory);
+
+		    IndexSearcher searcher = new IndexSearcher(directory, true);
 			
 			// valid exact phrase query
 			PhraseQuery query = new PhraseQuery();
@@ -254,7 +254,7 @@ namespace Lucene.Net.Search
 			QueryUtils.Check(query, searcher);
 			
 			
-			// currently StopAnalyzer does not leave "holes", so this matches.
+			// StopAnalyzer as of 2.4 does not leave "holes", so this matches.
 			query = new PhraseQuery();
 			query.Add(new Term("field", "words"));
 			query.Add(new Term("field", "here"));
@@ -283,8 +283,8 @@ namespace Lucene.Net.Search
 			
 			writer.Optimize();
 			writer.Close();
-			
-			IndexSearcher searcher = new IndexSearcher(directory);
+
+		    IndexSearcher searcher = new IndexSearcher(directory, true);
 			
 			PhraseQuery phraseQuery = new PhraseQuery();
 			phraseQuery.Add(new Term("source", "marketing"));
@@ -320,8 +320,8 @@ namespace Lucene.Net.Search
 			
 			writer.Optimize();
 			writer.Close();
-			
-			searcher = new IndexSearcher(directory);
+
+		    searcher = new IndexSearcher(directory, true);
 			
 			termQuery = new TermQuery(new Term("contents", "woo"));
 			phraseQuery = new PhraseQuery();
@@ -372,8 +372,8 @@ namespace Lucene.Net.Search
 			
 			writer.Optimize();
 			writer.Close();
-			
-			Searcher searcher = new IndexSearcher(directory);
+
+		    Searcher searcher = new IndexSearcher(directory, true);
 			PhraseQuery query = new PhraseQuery();
 			query.Add(new Term("field", "firstname"));
 			query.Add(new Term("field", "lastname"));
@@ -394,9 +394,8 @@ namespace Lucene.Net.Search
 		[Test]
 		public virtual void  TestToString()
 		{
-			StopAnalyzer analyzer = new StopAnalyzer();
-			StopFilter.SetEnablePositionIncrementsDefault(true);
-			QueryParser qp = new QueryParser("field", analyzer);
+			StopAnalyzer analyzer = new StopAnalyzer(Util.Version.LUCENE_CURRENT);
+			QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "field", analyzer);
 			qp.SetEnablePositionIncrements(true);
 			PhraseQuery q = (PhraseQuery) qp.Parse("\"this hi this is a test is\"");
 			Assert.AreEqual("field:\"? hi ? ? ? test\"", q.ToString());

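The TestToString hunk above replaces the global StopFilter.SetEnablePositionIncrementsDefault(true) call with the per-instance QueryParser.SetEnablePositionIncrements(true), alongside the Version-based StopAnalyzer constructor. A small sketch of that combination, reusing the test's own phrase (the class name is illustrative only):

    using Lucene.Net.Analysis;
    using Lucene.Net.QueryParsers;
    using Lucene.Net.Search;
    using Lucene.Net.Util;

    public static class StopwordPhraseExample
    {
        public static PhraseQuery ParseStoppedPhrase()
        {
            StopAnalyzer analyzer = new StopAnalyzer(Version.LUCENE_CURRENT);
            QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", analyzer);
            // Position increments are now enabled per parser instance rather than
            // through the removed static StopFilter default.
            qp.SetEnablePositionIncrements(true);
            // Stop words leave "holes" in the parsed phrase:
            // "this hi this is a test is"  ->  field:"? hi ? ? ? test"
            return (PhraseQuery) qp.Parse("\"this hi this is a test is\"");
        }
    }
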
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs Wed Nov 16 05:24:27 2011
@@ -16,7 +16,9 @@
  */
 
 using System;
-
+using System.Collections.Generic;
+using Lucene.Net.Analysis;
+using Lucene.Net.Util;
 using NUnit.Framework;
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -48,13 +50,8 @@ using SpanTermQuery = Lucene.Net.Search.
 namespace Lucene.Net.Search
 {
 	
-	/// <summary> Term position unit test.
-	/// 
-	/// 
-	/// </summary>
-	/// <version>  $Revision: 806844 $
-	/// </version>
-	public class TestPositionIncrement:BaseTokenStreamTestCase
+	/// <summary>Term position unit test.</summary>
+	public class TestPositionIncrement : LuceneTestCase
 	{
 		private class AnonymousClassAnalyzer:Analyzer
 		{
@@ -134,9 +131,9 @@ namespace Lucene.Net.Search
 			writer.AddDocument(d);
 			writer.Optimize();
 			writer.Close();
-			
-			
-			IndexSearcher searcher = new IndexSearcher(store);
+
+
+		    IndexSearcher searcher = new IndexSearcher(store, true);
 			
 			TermPositions pos = searcher.GetIndexReader().TermPositions(new Term("field", "1"));
 			pos.Next();
@@ -230,7 +227,7 @@ namespace Lucene.Net.Search
 			Assert.AreEqual(0, hits.Length);
 			
 			// should not find "1 2" because there is a gap of 1 in the index
-			QueryParser qp = new QueryParser("field", new StopWhitespaceAnalyzer(false));
+			QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "field", new StopWhitespaceAnalyzer(false));
 			q = (PhraseQuery) qp.Parse("\"1 2\"");
 			hits = searcher.Search(q, null, 1000).ScoreDocs;
 			Assert.AreEqual(0, hits.Length);
@@ -253,7 +250,7 @@ namespace Lucene.Net.Search
 			Assert.AreEqual(0, hits.Length);
 			
 			// when both qp and stopFilter propagate increments, we should find the doc.
-			qp = new QueryParser("field", new StopWhitespaceAnalyzer(true));
+			qp = new QueryParser(Util.Version.LUCENE_CURRENT, "field", new StopWhitespaceAnalyzer(true));
 			qp.SetEnablePositionIncrements(true);
 			q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
 			hits = searcher.Search(q, null, 1000).ScoreDocs;
@@ -271,117 +268,99 @@ namespace Lucene.Net.Search
 			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
 				TokenStream ts = a.TokenStream(fieldName, reader);
-				return new StopFilter(enablePositionIncrements, ts, new System.String[]{"stop"});
-			}
-		}
-		
-		[Test]
-		public virtual void  TestPayloadsPos0()
-		{
-			for (int x = 0; x < 2; x++)
-			{
-				Directory dir = new MockRAMDirectory();
-				IndexWriter writer = new IndexWriter(dir, new TestPayloadAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-				if (x == 1)
-				{
-					writer.SetAllowMinus1Position();
-				}
-				Document doc = new Document();
-                System.IO.MemoryStream ms = new System.IO.MemoryStream();
-                System.IO.StreamWriter sw = new System.IO.StreamWriter(ms);
-                sw.Write("a a b c d e a f g h i j a b k k");
-                // flush to stream & reset it's position so it can be read
-                sw.Flush();
-                ms.Position = 0;
-                doc.Add(new Field("content", new System.IO.StreamReader(ms)));
-				writer.AddDocument(doc);
-				
-				IndexReader r = writer.GetReader();
-				
-				TermPositions tp = r.TermPositions(new Term("content", "a"));
-				int count = 0;
-				Assert.IsTrue(tp.Next());
-				// "a" occurs 4 times
-				Assert.AreEqual(4, tp.Freq());
-				int expected;
-				if (x == 1)
-				{
-					expected = System.Int32.MaxValue;
-				}
-				else
-				{
-					expected = 0;
-				}
-				Assert.AreEqual(expected, tp.NextPosition());
-				if (x == 1)
-				{
-					continue;
-				}
-				Assert.AreEqual(1, tp.NextPosition());
-				Assert.AreEqual(3, tp.NextPosition());
-				Assert.AreEqual(6, tp.NextPosition());
-				
-				// only one doc has "a"
-				Assert.IsFalse(tp.Next());
-				
-				IndexSearcher is_Renamed = new IndexSearcher(r);
-				
-				SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
-				SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
-				SpanQuery[] sqs = new SpanQuery[]{stq1, stq2};
-				SpanNearQuery snq = new SpanNearQuery(sqs, 30, false);
-				
-				count = 0;
-				bool sawZero = false;
-				//System.out.println("\ngetPayloadSpans test");
-				Lucene.Net.Search.Spans.Spans pspans = snq.GetSpans(is_Renamed.GetIndexReader());
-				while (pspans.Next())
-				{
-					//System.out.println(pspans.doc() + " - " + pspans.start() + " - "+ pspans.end());
-					System.Collections.Generic.ICollection<byte[]> payloads = pspans.GetPayload();
-					sawZero |= pspans.Start() == 0;
-					for (System.Collections.IEnumerator it = payloads.GetEnumerator(); it.MoveNext(); )
-					{
-						count++;
-						System.Object generatedAux2 = it.Current;
-						//System.out.println(new String((byte[]) it.next()));
-					}
-				}
-				Assert.AreEqual(5, count);
-				Assert.IsTrue(sawZero);
-				
-				//System.out.println("\ngetSpans test");
-				Lucene.Net.Search.Spans.Spans spans = snq.GetSpans(is_Renamed.GetIndexReader());
-				count = 0;
-				sawZero = false;
-				while (spans.Next())
-				{
-					count++;
-					sawZero |= spans.Start() == 0;
-					//System.out.println(spans.doc() + " - " + spans.start() + " - " + spans.end());
-				}
-				Assert.AreEqual(4, count);
-				Assert.IsTrue(sawZero);
-				
-				//System.out.println("\nPayloadSpanUtil test");
-				
-				sawZero = false;
-				PayloadSpanUtil psu = new PayloadSpanUtil(is_Renamed.GetIndexReader());
-				System.Collections.Generic.ICollection<byte[]> pls = psu.GetPayloadsForQuery(snq);
-				count = pls.Count;
-				for (System.Collections.IEnumerator it = pls.GetEnumerator(); it.MoveNext(); )
-				{
-					System.String s = new System.String(System.Text.UTF8Encoding.UTF8.GetChars((byte[]) it.Current));
-					//System.out.println(s);
-					sawZero |= s.Equals("pos: 0");
-				}
-				Assert.AreEqual(5, count);
-				Assert.IsTrue(sawZero);
-				writer.Close();
-				is_Renamed.GetIndexReader().Close();
-				dir.Close();
+			    return new StopFilter(enablePositionIncrements, ts, new CharArraySet(new List<string> {"stop"}, true));
 			}
 		}
+
+        [Test]
+        public virtual void TestPayloadsPos0()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter writer = new IndexWriter(dir, new TestPayloadAnalyzer(), true,
+                                                 IndexWriter.MaxFieldLength.LIMITED);
+            Document doc = new Document();
+            System.IO.MemoryStream ms = new System.IO.MemoryStream();
+            System.IO.StreamWriter sw = new System.IO.StreamWriter(ms);
+            sw.Write("a a b c d e a f g h i j a b k k");
+            // flush to stream & reset its position so it can be read
+            sw.Flush();
+            ms.Position = 0;
+            doc.Add(new Field("content", new System.IO.StreamReader(ms)));
+            writer.AddDocument(doc);
+
+            IndexReader r = writer.GetReader();
+
+            TermPositions tp = r.TermPositions(new Term("content", "a"));
+            int count = 0;
+            Assert.IsTrue(tp.Next());
+            // "a" occurs 4 times
+            Assert.AreEqual(4, tp.Freq());
+            int expected = 0;
+            Assert.AreEqual(expected, tp.NextPosition());
+            Assert.AreEqual(1, tp.NextPosition());
+            Assert.AreEqual(3, tp.NextPosition());
+            Assert.AreEqual(6, tp.NextPosition());
+
+            // only one doc has "a"
+            Assert.IsFalse(tp.Next());
+
+            IndexSearcher is_Renamed = new IndexSearcher(r);
+
+            SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
+            SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
+            SpanQuery[] sqs = new SpanQuery[] {stq1, stq2};
+            SpanNearQuery snq = new SpanNearQuery(sqs, 30, false);
+
+            count = 0;
+            bool sawZero = false;
+            //System.out.println("\ngetPayloadSpans test");
+            Lucene.Net.Search.Spans.Spans pspans = snq.GetSpans(is_Renamed.GetIndexReader());
+            while (pspans.Next())
+            {
+                //System.out.println(pspans.doc() + " - " + pspans.start() + " - "+ pspans.end());
+                System.Collections.Generic.ICollection<byte[]> payloads = pspans.GetPayload();
+                sawZero |= pspans.Start() == 0;
+                for (System.Collections.IEnumerator it = payloads.GetEnumerator(); it.MoveNext();)
+                {
+                    count++;
+                    System.Object generatedAux2 = it.Current;
+                    //System.out.println(new String((byte[]) it.next()));
+                }
+            }
+            Assert.AreEqual(5, count);
+            Assert.IsTrue(sawZero);
+
+            //System.out.println("\ngetSpans test");
+            Lucene.Net.Search.Spans.Spans spans = snq.GetSpans(is_Renamed.GetIndexReader());
+            count = 0;
+            sawZero = false;
+            while (spans.Next())
+            {
+                count++;
+                sawZero |= spans.Start() == 0;
+                //System.out.println(spans.doc() + " - " + spans.start() + " - " + spans.end());
+            }
+            Assert.AreEqual(4, count);
+            Assert.IsTrue(sawZero);
+
+            //System.out.println("\nPayloadSpanUtil test");
+
+            sawZero = false;
+            PayloadSpanUtil psu = new PayloadSpanUtil(is_Renamed.GetIndexReader());
+            System.Collections.Generic.ICollection<byte[]> pls = psu.GetPayloadsForQuery(snq);
+            count = pls.Count;
+            for (System.Collections.IEnumerator it = pls.GetEnumerator(); it.MoveNext();)
+            {
+                System.String s = new System.String(System.Text.UTF8Encoding.UTF8.GetChars((byte[]) it.Current));
+                //System.out.println(s);
+                sawZero |= s.Equals("pos: 0");
+            }
+            Assert.AreEqual(5, count);
+            Assert.IsTrue(sawZero);
+            writer.Close();
+            is_Renamed.GetIndexReader().Close();
+            dir.Close();
+        }
 	}
 	
 	class TestPayloadAnalyzer:Analyzer

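The StopWhitespaceAnalyzer hunk above swaps the StopFilter constructor that took a raw string array for the CharArraySet overload. A short sketch of an analyzer built the new way (the "stop" stop word mirrors the test; the class name is hypothetical):

    using System.Collections.Generic;
    using System.IO;
    using Lucene.Net.Analysis;
    using Lucene.Net.Util;

    public class SingleStopWordAnalyzer : Analyzer
    {
        private readonly bool enablePositionIncrements;

        public SingleStopWordAnalyzer(bool enablePositionIncrements)
        {
            this.enablePositionIncrements = enablePositionIncrements;
        }

        public override TokenStream TokenStream(string fieldName, TextReader reader)
        {
            TokenStream ts = new WhitespaceTokenizer(reader);
            // CharArraySet (case-insensitive here) replaces the old string[] stop-word overload.
            CharArraySet stopWords = new CharArraySet(new List<string> { "stop" }, true);
            return new StopFilter(enablePositionIncrements, ts, stopWords);
        }
    }
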
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs Wed Nov 16 05:24:27 2011
@@ -426,12 +426,12 @@ namespace Lucene.Net.Search
 			writerB.AddDocument(lDoc3);
 			writerB.Close();
 			
-			QueryParser parser = new QueryParser("fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
+			QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			Query query = parser.Parse("handle:1");
 			
 			Searcher[] searchers = new Searcher[2];
-			searchers[0] = new IndexSearcher(indexStoreB);
-			searchers[1] = new IndexSearcher(indexStoreA);
+			searchers[0] = new IndexSearcher(indexStoreB, true);
+            searchers[1] = new IndexSearcher(indexStoreA, true);
 			Searcher mSearcher = new MultiSearcher(searchers);
 			ScoreDoc[] hits = mSearcher.Search(query, null, 1000).ScoreDocs;