Posted to commits@lucenenet.apache.org by do...@apache.org on 2009/07/29 20:04:24 UTC

svn commit: r798995 [29/35] - in /incubator/lucene.net/trunk/C#/src: Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Analysis/Standard/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/QueryParser/ Lucene.Net/Search/ Lucene.Net/Search/Function/ Lucene.Net...

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentMerger.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs Wed Jul 29 18:04:12 2009
@@ -110,7 +110,7 @@
 			Assert.IsTrue(termDocs != null);
 			Assert.IsTrue(termDocs.Next() == true);
 			
-			System.Collections.ICollection stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
+			System.Collections.Generic.ICollection<string> stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
 			Assert.IsTrue(stored != null);
 			//System.out.println("stored size: " + stored.size());
 			Assert.IsTrue(stored.Count == 4, "We do not have 4 fields that were indexed with term vector");
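
GetFieldNames now returns a generic ICollection<string> instead of the untyped ICollection, so callers can enumerate field names without casting. A minimal sketch of the new usage, assuming only the signature shown in this hunk (PrintTermVectorFields is a hypothetical helper, not part of the commit):

    using Lucene.Net.Index;

    static void PrintTermVectorFields(IndexReader reader)
    {
        // The generic collection is strongly typed; no per-element cast.
        System.Collections.Generic.ICollection<string> stored =
            reader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
        foreach (string fieldName in stored)
            System.Console.WriteLine(fieldName);
    }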

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentReader.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs Wed Jul 29 18:04:12 2009
@@ -81,7 +81,6 @@
 		[Test]
 		public virtual void  TestDocument()
 		{
-			int i = 1;
 			Assert.IsTrue(reader.NumDocs() == 1);
 			Assert.IsTrue(reader.MaxDoc() >= 1);
 			Lucene.Net.Documents.Document result = reader.Document(0);
@@ -124,7 +123,7 @@
 		[Test]
 		public virtual void  TestGetFieldNameVariations()
 		{
-			System.Collections.ICollection result = reader.GetFieldNames(IndexReader.FieldOption.ALL);
+			System.Collections.Generic.ICollection<string> result = reader.GetFieldNames(IndexReader.FieldOption.ALL);
 			Assert.IsTrue(result != null);
 			Assert.IsTrue(result.Count == DocHelper.all.Count);
 			for (System.Collections.IEnumerator iter = result.GetEnumerator(); iter.MoveNext(); )
@@ -271,4 +270,4 @@
 			TestTermVectors();
 		}
 	}
-}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentTermDocs.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs Wed Jul 29 18:04:12 2009
@@ -130,7 +130,7 @@
 		public virtual void  TestSkipTo(int indexDivisor)
 		{
 			Directory dir = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			Term ta = new Term("content", "aaa");
 			for (int i = 0; i < 10; i++)
@@ -290,7 +290,7 @@
 		private void  AddDoc(IndexWriter writer, System.String value_Renamed)
 		{
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.TOKENIZED));
+			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 		}
 	}
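
This file and the ones that follow migrate IndexWriter construction to the four-argument overload that makes the field-length policy explicit. A minimal sketch of the migrated pattern, assuming only the APIs visible in this commit (BuildSmallIndex is a hypothetical helper; directory and analyzer choices are illustrative):

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    static void BuildSmallIndex()
    {
        Directory dir = new RAMDirectory();
        // LIMITED caps the number of tokens indexed per field;
        // UNLIMITED (used elsewhere in this commit) removes the cap.
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                             IndexWriter.MaxFieldLength.LIMITED);
        Document doc = new Document();
        doc.Add(new Field("content", "aaa bbb", Field.Store.NO, Field.Index.ANALYZED));
        writer.AddDocument(doc);
        writer.Close();
    }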

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentTermEnum.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs Wed Jul 29 18:04:12 2009
@@ -42,7 +42,7 @@
 		{
 			IndexWriter writer = null;
 			
-			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			// add 100 documents with term : aaa
 			// add 100 documents with terms: aaa bbb
@@ -59,7 +59,7 @@
 			VerifyDocFreq();
 			
 			// merge segments by optimizing the index
-			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+            writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 			writer.Optimize();
 			writer.Close();
 			
@@ -71,7 +71,7 @@
 		public virtual void  TestPrevTermAtEnd()
 		{
 			Directory dir = new MockRAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			AddDoc(writer, "aaa bbb");
 			writer.Close();
 			IndexReader reader = IndexReader.Open(dir);
@@ -123,7 +123,7 @@
 		private void  AddDoc(IndexWriter writer, System.String value_Renamed)
 		{
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.TOKENIZED));
+			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 		}
 	}
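
The Field.Index renames (TOKENIZED to ANALYZED, UN_TOKENIZED to NOT_ANALYZED) recur throughout this commit. A short sketch of when each applies, using only the Field API shown above (MakeDoc and the field names are illustrative):

    using Lucene.Net.Documents;

    static Document MakeDoc(int id, string contents)
    {
        Document d = new Document();
        // NOT_ANALYZED: the whole value is indexed as a single term (ids, keys).
        d.Add(new Field("id", id.ToString(), Field.Store.YES,
                        Field.Index.NOT_ANALYZED));
        // ANALYZED: the analyzer tokenizes the value (free text).
        d.Add(new Field("contents", contents, Field.Store.NO,
                        Field.Index.ANALYZED));
        return d;
    }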

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestStressIndexing.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing.cs Wed Jul 29 18:04:12 2009
@@ -19,14 +19,12 @@
 
 using NUnit.Framework;
 
-using Lucene.Net.Documents;
-using Lucene.Net.Index;
-using Lucene.Net.QueryParsers;
-using Lucene.Net.Store;
 using Lucene.Net.Util;
+using Lucene.Net.Store;
+using Lucene.Net.Documents;
 using Lucene.Net.Analysis;
 using Lucene.Net.Search;
-using Searchable = Lucene.Net.Search.Searchable;
+using Lucene.Net.QueryParsers;
 
 namespace Lucene.Net.Index
 {
@@ -67,7 +65,8 @@
 				}
 				catch (System.Exception e)
 				{
-					System.Console.Out.WriteLine(e.StackTrace);
+                    System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread + ": exc");
+                    System.Console.Out.WriteLine(e.StackTrace);
 					failed = true;
 				}
 			}
@@ -99,8 +98,8 @@
 				{
 					Document d = new Document();
 					int n = Lucene.Net.Index.TestStressIndexing.RANDOM.Next();
-					d.Add(new Field("id", System.Convert.ToString(nextID++), Field.Store.YES, Field.Index.UN_TOKENIZED));
-					d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.TOKENIZED));
+					d.Add(new Field("id", System.Convert.ToString(nextID++), Field.Store.YES, Field.Index.NOT_ANALYZED));
+					d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
 					writer.AddDocument(d);
 				}
 				
@@ -140,8 +139,9 @@
 			IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
 			
 			modifier.SetMaxBufferedDocs(10);
-			
+
 			TimedThread[] threads = new TimedThread[4];
+            int numThread = 0;
 			
 			if (mergeScheduler != null)
 				modifier.SetMergeScheduler(mergeScheduler);
@@ -149,34 +149,32 @@
 			// One modifier that writes 10 docs then removes 5, over
 			// and over:
 			IndexerThread indexerThread = new IndexerThread(modifier, threads);
-			threads[0] = indexerThread;
+			threads[numThread++] = indexerThread;
 			indexerThread.Start();
-			
-			IndexerThread indexerThread2 = new IndexerThread(modifier, threads);
-			threads[2] = indexerThread2;
-			indexerThread2.Start();
-			
-			// Two searchers that constantly just re-instantiate the
-			// searcher:
-			SearcherThread searcherThread1 = new SearcherThread(directory, threads);
-			threads[3] = searcherThread1;
-			searcherThread1.Start();
-			
-			SearcherThread searcherThread2 = new SearcherThread(directory, threads);
-			threads[3] = searcherThread2;
-			searcherThread2.Start();
-			
-			indexerThread.Join();
-			indexerThread2.Join();
-			searcherThread1.Join();
-			searcherThread2.Join();
-			
+
+            IndexerThread indexerThread2 = new IndexerThread(modifier, threads);
+            threads[numThread++] = indexerThread2;
+            indexerThread2.Start();
+
+            // Two searchers that constantly just re-instantiate the
+            // searcher:
+            SearcherThread searcherThread1 = new SearcherThread(directory, threads);
+            threads[numThread++] = searcherThread1;
+            searcherThread1.Start();
+
+            SearcherThread searcherThread2 = new SearcherThread(directory, threads);
+            threads[numThread++] = searcherThread2;
+            searcherThread2.Start();
+
+            for (int i = 0; i < threads.Length; i++)
+                //threads[i].Join();
+                if (threads[i] != null) threads[i].Join();
+
 			modifier.Close();
-			
-			Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
-			Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
-			Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
-			Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");
+
+            for (int i = 0; i < threads.Length; i++)
+                //Assert.IsTrue(!((TimedThread)threads[i]).failed);
+                if (threads[i] != null) Assert.IsTrue(!((TimedThread)threads[i]).failed);
 			
 			//System.out.println("    Writer: " + indexerThread.count + " iterations");
 			//System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
@@ -190,40 +188,44 @@
 		[Test]
 		public virtual void  TestStressIndexAndSearching()
 		{
-			
-			// RAMDir
-			Directory directory = new MockRAMDirectory();
-			RunStressTest(directory, true, null);
-			directory.Close();
-			
-			// FSDir
-			System.String tempDir = System.IO.Path.GetTempPath();
-			System.IO.FileInfo dirPath = new System.IO.FileInfo(tempDir + "\\" + "lucene.test.stress");
-			directory = FSDirectory.GetDirectory(dirPath);
-			RunStressTest(directory, true, null);
-			directory.Close();
-			
-			// With ConcurrentMergeScheduler, in RAMDir
-			directory = new MockRAMDirectory();
-			RunStressTest(directory, true, new ConcurrentMergeScheduler());
-			directory.Close();
-			
-			// With ConcurrentMergeScheduler, in FSDir
-			directory = FSDirectory.GetDirectory(dirPath);
-			RunStressTest(directory, true, new ConcurrentMergeScheduler());
-			directory.Close();
-			
-			// With ConcurrentMergeScheduler and autoCommit=false, in RAMDir
-			directory = new MockRAMDirectory();
-			RunStressTest(directory, false, new ConcurrentMergeScheduler());
-			directory.Close();
-			
-			// With ConcurrentMergeScheduler and autoCommit=false, in FSDir
-			directory = FSDirectory.GetDirectory(dirPath);
-			RunStressTest(directory, false, new ConcurrentMergeScheduler());
-			directory.Close();
-			
-			_TestUtil.RmDir(dirPath);
+            ////for (int i = 0; i < 10; i++)
+            ////{
+            //// RAMDir
+            //Directory directory = new MockRAMDirectory();
+            //RunStressTest(directory, true, null);
+            //directory.Close();
+            Directory directory;
+
+            // FSDir
+            System.String tempDir = System.IO.Path.GetTempPath();
+            System.IO.FileInfo dirPath = new System.IO.FileInfo(tempDir + "\\" + "lucene.test.stress");
+            directory = FSDirectory.GetDirectory(dirPath);
+            RunStressTest(directory, true, null);
+            directory.Close();
+
+            //System.Console.WriteLine("Index Path: {0}", dirPath);
+
+            //// With ConcurrentMergeScheduler, in RAMDir
+            //directory = new MockRAMDirectory();
+            //RunStressTest(directory, true, new ConcurrentMergeScheduler());
+            //directory.Close();
+
+            // With ConcurrentMergeScheduler, in FSDir
+            directory = FSDirectory.GetDirectory(dirPath);
+            RunStressTest(directory, true, new ConcurrentMergeScheduler());
+            directory.Close();
+
+            //// With ConcurrentMergeScheduler and autoCommit=false, in RAMDir
+            //directory = new MockRAMDirectory();
+            //RunStressTest(directory, false, new ConcurrentMergeScheduler());
+            //directory.Close();
+
+            // With ConcurrentMergeScheduler and autoCommit=false, in FSDir
+            directory = FSDirectory.GetDirectory(dirPath);
+            RunStressTest(directory, false, new ConcurrentMergeScheduler());
+            directory.Close();
+
+            _TestUtil.RmDir(dirPath);
 		}
 	}
 }
\ No newline at end of file
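
The thread handling above fixes a real bug: the old code left threads[1] unset and assigned both searcher threads to threads[3], so searcherThread1 was never joined or checked. The replacement uses a numThread counter plus null-checked joins. A minimal sketch of that pattern with plain .NET threads (RunAll and the worker body are illustrative):

    using System.Threading;

    static void RunAll()
    {
        Thread[] threads = new Thread[4];
        int numThread = 0;                           // next free slot

        Thread worker = new Thread(() => { /* indexing or searching */ });
        threads[numThread++] = worker;               // counter prevents slot collisions
        worker.Start();

        for (int i = 0; i < threads.Length; i++)
            if (threads[i] != null)                  // join only the filled slots
                threads[i].Join();
    }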

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestStressIndexing2.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing2.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing2.cs Wed Jul 29 18:04:12 2009
@@ -19,14 +19,15 @@
 
 using Lucene.Net.Documents;
 using Lucene.Net.Store;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using Lucene.Net.Analysis;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
+using TermQuery = Lucene.Net.Search.TermQuery;
 
 using NUnit.Framework;
 
 namespace Lucene.Net.Index
 {
-	
 	[TestFixture]
 	public class TestStressIndexing2 : LuceneTestCase
 	{
@@ -50,7 +51,22 @@
 		internal static int seed = 0;
 		
 		internal static System.Random r = new System.Random((System.Int32) 0);
-		
+
+        public class MockIndexWriter : IndexWriter
+        {
+            public MockIndexWriter(Directory dir, bool autoCommit, Analyzer a, bool create)
+                : base(dir, autoCommit, a, create)
+            {
+            }
+
+            protected override bool TestPoint(string name)
+            {
+                if (TestStressIndexing2.r.Next(4) == 2)
+                    System.Threading.Thread.Sleep(1);
+                return true;
+            }
+        }
+
 		[Test]
 		public virtual void  TestRandom()
 		{
@@ -65,8 +81,8 @@
 		public virtual void  TestMultiConfig()
 		{
 			// test lots of smaller different params together
-			for (int i = 0; i < 100; i++)
-			{
+            for (int i = 0; i < 100; i++)
+            {
 				// increase iterations for better testing
 				sameFieldOrder = r.NextDouble() > 0.5;
 				autoCommit = r.NextDouble() > 0.5;
@@ -97,50 +113,55 @@
 		
 		public virtual System.Collections.IDictionary IndexRandom(int nThreads, int iterations, int range, Directory dir)
 		{
-			IndexWriter w = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-			w.SetUseCompoundFile(false);
-			
-			// force many merges
-			w.SetMergeFactor(mergeFactor);
-			w.SetRAMBufferSizeMB(.1);
-			w.SetMaxBufferedDocs(maxBufferedDocs);
-			
-			threads = new IndexingThread[nThreads];
-			for (int i = 0; i < threads.Length; i++)
-			{
-				IndexingThread th = new IndexingThread();
-				th.w = w;
-				th.base_Renamed = 1000000 * i;
-				th.range = range;
-				th.iterations = iterations;
-				threads[i] = th;
-			}
-			
-			for (int i = 0; i < threads.Length; i++)
-			{
-				threads[i].Start();
-			}
-			for (int i = 0; i < threads.Length; i++)
-			{
-				threads[i].Join();
-			}
-			
-			// w.optimize();
-			w.Close();
-			
-			System.Collections.IDictionary docs = new System.Collections.Hashtable();
-			for (int i = 0; i < threads.Length; i++)
-			{
-				IndexingThread th = threads[i];
-				lock (th)
-				{
-					System.Collections.IEnumerator e = th.docs.Keys.GetEnumerator();
-					while (e.MoveNext())
-					{
-						docs[e.Current] = th.docs[e.Current];
-					}
-				}
-			}
+            System.Collections.Hashtable docs = new System.Collections.Hashtable();
+            for (int iter = 0; iter < 3; iter++)
+            {
+                IndexWriter w = new MockIndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                w.SetUseCompoundFile(false);
+
+                // force many merges
+                w.SetMergeFactor(mergeFactor);
+                w.SetRAMBufferSizeMB(.1);
+                w.SetMaxBufferedDocs(maxBufferedDocs);
+
+                threads = new IndexingThread[nThreads];
+                for (int i = 0; i < threads.Length; i++)
+                {
+                    IndexingThread th = new IndexingThread();
+                    th.w = w;
+                    th.base_Renamed = 1000000 * i;
+                    th.range = range;
+                    th.iterations = iterations;
+                    threads[i] = th;
+                }
+
+                for (int i = 0; i < threads.Length; i++)
+                {
+                    threads[i].Start();
+                }
+                for (int i = 0; i < threads.Length; i++)
+                {
+                    threads[i].Join();
+                }
+
+                // w.optimize();
+                w.Close();
+
+                for (int i = 0; i < threads.Length; i++)
+                {
+                    IndexingThread th = threads[i];
+                    lock (th)
+                    {
+                        System.Collections.IEnumerator e = th.docs.Keys.GetEnumerator();
+                        while (e.MoveNext())
+                        {
+                            docs[e.Current] = th.docs[e.Current];
+                        }
+                    }
+                }
+            }
+
+            _TestUtil.CheckIndex(dir);
 			
 			return docs;
 		}
@@ -148,7 +169,7 @@
 		
 		public static void  IndexSerial(System.Collections.IDictionary docs, Directory dir)
 		{
-			IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer());
+			IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
 			
 			// index all docs in a single thread
 			System.Collections.IEnumerator iter = docs.Values.GetEnumerator();
@@ -212,7 +233,14 @@
 					break;
 				
 				termDocs1.Seek(termEnum);
-				Assert.IsTrue(termDocs1.Next());
+                if (!termDocs1.Next())
+                {
+                    // This doc is deleted and wasn't replaced
+                    termDocs2.Seek(termEnum);
+                    Assert.IsFalse(termDocs2.Next());
+                    continue;
+                }
+
 				int id1 = termDocs1.Doc();
 				Assert.IsFalse(termDocs1.Next());
 				
@@ -231,7 +259,7 @@
 					// verify term vectors are equivalent        
 					VerifyEquals(r1.GetTermFreqVectors(id1), r2.GetTermFreqVectors(id2));
 				}
-				catch (System.ApplicationException e)
+				catch (System.Exception e)
 				{
 					System.Console.Out.WriteLine("FAILED id=" + term + " id1=" + id1 + " id2=" + id2);
 					TermFreqVector[] tv1 = r1.GetTermFreqVectors(id1);
@@ -413,7 +441,9 @@
 			{
 				TermFreqVector v1 = d1[i];
 				TermFreqVector v2 = d2[i];
-				Assert.AreEqual(v1.Size(), v2.Size());
+                if (v1 == null || v2 == null)
+                    System.Console.Out.WriteLine("v1=" + v1 + " v2=" + v2 + " i=" + i + " of " + d1.Length);
+                Assert.AreEqual(v1.Size(), v2.Size());
 				int numTerms = v1.Size();
 				System.String[] terms1 = v1.GetTerms();
 				System.String[] terms2 = v2.GetTerms();
@@ -468,10 +498,65 @@
 			{
 				return r.Next(lim);
 			}
-			
-			public virtual System.String GetString(int nTokens)
+
+            // start is inclusive and end is exclusive
+            public int NextInt(int start, int end)
+            {
+                return start + r.Next(end - start);
+            }
+
+            internal char[] buffer = new char[100];
+
+            private int AddUTF8Token(int start)
+            {
+                int end = start + NextInt(20);
+                if (buffer.Length < 1 + end)
+                {
+                    char[] newBuffer = new char[(int)((1 + end) * 1.25)];
+                    System.Array.Copy(buffer, 0, newBuffer, 0, buffer.Length);
+                    buffer = newBuffer;
+                }
+
+                for (int i = start; i < end; i++)
+                {
+                    int t = NextInt(6);
+                    if (0 == t && i < end - 1)
+                    {
+                        // make a surrogate pair
+                        // high surrogate
+                        buffer[i++] = (char)NextInt(0xD800, 0xDC00);
+                        // low surrogate
+                        buffer[i] = (char)NextInt(0xDC00, 0xE000);
+                    }
+                    else if (t <= 1)
+                        buffer[i] = (char)NextInt(0x80);
+                    else if (t == 2)
+                        buffer[i] = (char)NextInt(0x80, 0x800);
+                    else if (t == 3)
+                        buffer[i] = (char)NextInt(0x800, 0xD800);
+                    else if (t == 4)
+                        buffer[i] = (char)NextInt(0xE000, 0xFFFF);
+                    else if (t == 5)
+                    {
+                        // illegal unpaired surrogate
+                        if (r.Next(2) == 0)
+                            buffer[i] = (char)NextInt(0xD800, 0xDC00);
+                        else
+                            buffer[i] = (char)NextInt(0xDC00, 0xE000);
+                    }
+                }
+                buffer[end] = ' ';
+                return 1 + end;
+            }
+
+            public virtual System.String GetString(int nTokens)
 			{
 				nTokens = nTokens != 0?nTokens:r.Next(4) + 1;
+
+                // 1/2 the time, make a random UTF-8 string
+                if (r.Next(2) == 0)
+                    return GetUTF8String(nTokens);
+
 				// avoid StringBuffer because it adds extra synchronization.
 				char[] arr = new char[nTokens * 2];
 				for (int i = 0; i < nTokens; i++)
@@ -481,16 +566,28 @@
 				}
 				return new System.String(arr);
 			}
-			
-			
-			public virtual void  IndexDoc()
+
+            public string GetUTF8String(int nTokens)
+            {
+                int upto = 0;
+                SupportClass.CollectionsSupport.ArrayFill(buffer, (char)0);
+                for (int i = 0; i < nTokens; i++)
+                    upto = AddUTF8Token(upto);
+                return new string(buffer, 0, upto);
+            }
+
+            public string GetIdString()
+            {
+                return "" + (base_Renamed + NextInt(range));
+            }
+
+            public virtual void IndexDoc()
 			{
 				Document d = new Document();
 				
 				System.Collections.ArrayList fields = new System.Collections.ArrayList();
-				int id = base_Renamed + NextInt(range);
-				System.String idString = "" + id;
-				Field idField = new Field("id", idString, Field.Store.YES, Field.Index.NO_NORMS);
+                System.String idString = GetIdString();
+				Field idField = new Field(idTerm.Field(), idString, Field.Store.YES, Field.Index.NOT_ANALYZED);
 				fields.Add(idField);
 				
 				int nFields = NextInt(Lucene.Net.Index.TestStressIndexing2.maxFields);
@@ -522,19 +619,19 @@
                     {
 
                         case 0:
-                            fields.Add(new Field("f0", GetString(1), Field.Store.YES, Field.Index.NO_NORMS, tvVal));
+                            fields.Add(new Field("f" + NextInt(100), GetString(1), Field.Store.YES, Field.Index.NOT_ANALYZED, tvVal));
                             break;
 
                         case 1:
-                            fields.Add(new Field("f1", GetString(0), Field.Store.NO, Field.Index.TOKENIZED, tvVal));
+                            fields.Add(new Field("f" + NextInt(100), GetString(0), Field.Store.NO, Field.Index.ANALYZED, tvVal));
                             break;
 
                         case 2:
-                            fields.Add(new Field("f2", GetString(0), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
+                            fields.Add(new Field("f" + NextInt(100), GetString(0), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
                             break;
 
                         case 3:
-                            fields.Add(new Field("f3", GetString(Lucene.Net.Index.TestStressIndexing2.bigFieldSize), Field.Store.YES, Field.Index.TOKENIZED, tvVal));
+                            fields.Add(new Field("f" + NextInt(100), GetString(Lucene.Net.Index.TestStressIndexing2.bigFieldSize), Field.Store.YES, Field.Index.ANALYZED, tvVal));
                             break;
                     }
                 }
@@ -560,6 +657,20 @@
 				docs[idString] = d;
 			}
 			
+            public void DeleteDoc()
+            {
+                string idString = GetIdString();
+                w.DeleteDocuments(idTerm.CreateTerm(idString));
+                docs.Remove(idString);
+            }
+
+            public void DeleteByQuery()
+            {
+                string idString = GetIdString();
+                w.DeleteDocuments(new TermQuery(idTerm.CreateTerm(idString)));
+                docs.Remove(idString);
+            }
+
 			override public void  Run()
 			{
 				try
@@ -567,8 +678,20 @@
 					r = new System.Random((System.Int32) (base_Renamed + range + Lucene.Net.Index.TestStressIndexing2.seed));
 					for (int i = 0; i < iterations; i++)
 					{
-						IndexDoc();
-					}
+                        int what = NextInt(100);
+                        if (what < 5)
+                        {
+                            DeleteDoc();
+                        }
+                        else if (what < 10)
+                        {
+                            DeleteByQuery();
+                        }
+                        else
+                        {
+						    IndexDoc();
+					    }
+                    }
 				}
 				catch (System.Exception e)
 				{
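
AddUTF8Token above builds random strings spanning every UTF-16 range, including deliberately unpaired surrogates, to stress the indexer's UTF-8 round-tripping. A minimal sketch of the valid-pair branch, using only System.Random (SurrogateSketch is a hypothetical wrapper; note that Random.Next(min, max) excludes max, matching NextInt(start, end) above):

    using System;

    static class SurrogateSketch
    {
        static readonly Random r = new Random();

        public static string ValidPair()
        {
            char high = (char)r.Next(0xD800, 0xDC00);  // high surrogate U+D800-U+DBFF
            char low = (char)r.Next(0xDC00, 0xE000);   // low surrogate U+DC00-U+DFFF
            System.Diagnostics.Debug.Assert(char.IsSurrogatePair(high, low));
            return new string(new[] { high, low });
        }
    }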

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermVectorsReader.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs Wed Jul 29 18:04:12 2009
@@ -119,7 +119,7 @@
 			}
 			System.Array.Sort(tokens);
 			
-			IndexWriter writer = new IndexWriter(dir, new MyAnalyzer(this), true);
+			IndexWriter writer = new IndexWriter(dir, new MyAnalyzer(this), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetUseCompoundFile(false);
 			Document doc = new Document();
 			for (int i = 0; i < testFields.Length; i++)
@@ -133,7 +133,7 @@
 					tv = Field.TermVector.WITH_OFFSETS;
 				else
 					tv = Field.TermVector.YES;
-				doc.Add(new Field(testFields[i], "", Field.Store.NO, Field.Index.TOKENIZED, tv));
+				doc.Add(new Field(testFields[i], "", Field.Store.NO, Field.Index.ANALYZED, tv));
 			}
 			
 			//Create 5 documents for testing, they all have the same
@@ -167,22 +167,19 @@
 				
 			}
 			internal int tokenUpto;
-			public override Token Next()
+			public override Token Next(Token reusableToken)
 			{
 				if (tokenUpto >= Enclosing_Instance.tokens.Length)
 					return null;
 				else
 				{
-					Token t = new Token();
 					TestToken testToken = Enclosing_Instance.tokens[tokenUpto++];
-					t.SetTermText(testToken.text);
+                    reusableToken.Reinit(testToken.text, testToken.startOffset, testToken.endOffset);
 					if (tokenUpto > 1)
-						t.SetPositionIncrement(testToken.pos - Enclosing_Instance.tokens[tokenUpto - 2].pos);
+						reusableToken.SetPositionIncrement(testToken.pos - Enclosing_Instance.tokens[tokenUpto - 2].pos);
 					else
-						t.SetPositionIncrement(testToken.pos + 1);
-					t.SetStartOffset(testToken.startOffset);
-					t.SetEndOffset(testToken.endOffset);
-					return t;
+                        reusableToken.SetPositionIncrement(testToken.pos + 1);
+                    return reusableToken;
 				}
 			}
 		}
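
This file moves from the allocating Next() to the reuse-oriented Next(Token reusableToken), where a stream repopulates the caller-supplied token via Reinit instead of constructing a new Token per call. A minimal sketch of that contract, assuming only the Token methods used in this hunk (ListTokenStream is hypothetical):

    using Lucene.Net.Analysis;

    // Emits a fixed list of terms, reusing the caller's Token on each call.
    class ListTokenStream : TokenStream
    {
        private readonly string[] terms;
        private int upto;
        private int offset;

        public ListTokenStream(string[] terms) { this.terms = terms; }

        public override Token Next(Token reusableToken)
        {
            if (upto == terms.Length)
                return null;                          // end of stream
            string term = terms[upto++];
            // Reinit repopulates the reusable token rather than allocating.
            reusableToken.Reinit(term, offset, offset + term.Length);
            offset += term.Length + 1;
            return reusableToken;
        }
    }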

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermdocPerf.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs Wed Jul 29 18:04:12 2009
@@ -44,12 +44,13 @@
 		public RepeatingTokenStream(System.String val)
 		{
 			t = new Token(val, 0, val.Length);
+            t.SetTermBuffer(val);
 		}
 		
-		public override Token Next()
+		public override Token Next(Token reusableToken)
 		{
-			return --num < 0?null:t;
-		}
+            return --num < 0 ? null : (Token)(t.Clone());
+        }
 	}
 	
 	[TestFixture]
@@ -100,8 +101,8 @@
 			Analyzer analyzer = new AnonymousClassAnalyzer(random, percentDocs, ts, maxTF, this);
 			
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field(field, val, Field.Store.NO, Field.Index.NO_NORMS));
-			IndexWriter writer = new IndexWriter(dir, analyzer, true);
+			doc.Add(new Field(field, val, Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
+			IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(100);
 			writer.SetMergeFactor(100);
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestThreadedOptimize.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs Wed Jul 29 18:04:12 2009
@@ -70,8 +70,8 @@
 						for (int k = 0; k < 17 * (1 + iFinal); k++)
 						{
 							Document d = new Document();
-							d.Add(new Field("id", iterFinal + "_" + iFinal + "_" + j + "_" + k, Field.Store.YES, Field.Index.UN_TOKENIZED));
-							d.Add(new Field("contents", English.IntToEnglish(iFinal + k), Field.Store.NO, Field.Index.TOKENIZED));
+							d.Add(new Field("id", iterFinal + "_" + iFinal + "_" + j + "_" + k, Field.Store.YES, Field.Index.NOT_ANALYZED));
+							d.Add(new Field("contents", English.IntToEnglish(iFinal + k), Field.Store.NO, Field.Index.ANALYZED));
 							writerFinal.AddDocument(d);
 						}
 						for (int k = 0; k < 9 * (1 + iFinal); k++)
@@ -93,10 +93,10 @@
 		private const int NUM_THREADS = 3;
 		//private final static int NUM_THREADS = 5;
 		
-		private const int NUM_ITER = 2;
+		private const int NUM_ITER = 1;
 		//private final static int NUM_ITER = 10;
 		
-		private const int NUM_ITER2 = 2;
+		private const int NUM_ITER2 = 1;
 		//private final static int NUM_ITER2 = 5;
 		
 		private bool failed;
@@ -123,8 +123,8 @@
 				for (int i = 0; i < 200; i++)
 				{
 					Document d = new Document();
-					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
-					d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.TOKENIZED));
+					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
+					d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
 					writer.AddDocument(d);
 				}
 				
@@ -179,8 +179,8 @@
 		public virtual void  TestThreadedOptimize_Renamed_Method()
 		{
 			Directory directory = new MockRAMDirectory();
-			RunTest(directory, false, null);
-			RunTest(directory, true, null);
+            RunTest(directory, false, new SerialMergeScheduler());
+            RunTest(directory, true, new SerialMergeScheduler());
 			RunTest(directory, false, new ConcurrentMergeScheduler());
 			RunTest(directory, true, new ConcurrentMergeScheduler());
 			directory.Close();
@@ -191,8 +191,8 @@
 			
 			System.String dirName = tempDir + "/luceneTestThreadedOptimize";
 			directory = FSDirectory.GetDirectory(dirName);
-			RunTest(directory, false, null);
-			RunTest(directory, true, null);
+            RunTest(directory, false, new SerialMergeScheduler());
+            RunTest(directory, true, new SerialMergeScheduler());
 			RunTest(directory, false, new ConcurrentMergeScheduler());
 			RunTest(directory, true, new ConcurrentMergeScheduler());
 			directory.Close();
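
RunTest now passes an explicit SerialMergeScheduler where it previously passed null, so both scheduling modes are exercised by name. A minimal sketch of wiring a scheduler onto a writer, assuming the SetMergeScheduler call used earlier in this commit (OpenWriter is a hypothetical helper):

    using Lucene.Net.Analysis;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    static IndexWriter OpenWriter(Directory dir, bool serial)
    {
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                             IndexWriter.MaxFieldLength.LIMITED);
        if (serial)
            writer.SetMergeScheduler(new SerialMergeScheduler());      // merges on the calling thread
        else
            writer.SetMergeScheduler(new ConcurrentMergeScheduler());  // merges on background threads
        return writer;
    }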

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTransactions.cs?rev=798995&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactions.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTransactions.cs Wed Jul 29 18:04:12 2009
@@ -0,0 +1,252 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using NUnit.Framework;
+
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using Lucene.Net.Analysis;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    [TestFixture]
+    public class TestTransactions : LuceneTestCase
+    {
+        private static readonly System.Random RANDOM = new System.Random();
+        private static volatile bool doFail;
+
+        private class RandomFailure : MockRAMDirectory.Failure
+        {
+            override public void Eval(MockRAMDirectory dir)
+            {
+                if (TestTransactions.doFail && RANDOM.Next() % 10 <= 3)
+                    throw new System.IO.IOException("now failing randomly but on purpose");
+            }
+        }
+
+        private abstract class TimedThread : SupportClass.ThreadClass
+        {
+            internal bool failed;
+            private static int RUN_TIME_SEC = 6;
+            private TimedThread[] allThreads;
+
+            abstract public void DoWork();
+
+            internal TimedThread(TimedThread[] threads)
+            {
+                this.allThreads = threads;
+            }
+
+            public override void Run()
+            {
+                System.DateTime stopTime = System.DateTime.Now.AddSeconds(RUN_TIME_SEC);
+
+                try
+                {
+                    while (System.DateTime.Now < stopTime && !AnyErrors())
+                        DoWork();
+                }
+                catch (System.Exception e)
+                {
+                    System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread + ": exc");
+                    System.Console.Out.WriteLine(e.StackTrace);
+                    failed = true;
+                }
+            }
+
+            private bool AnyErrors()
+            {
+                for (int i = 0; i < allThreads.Length; i++)
+                    if (allThreads[i] != null && allThreads[i].failed)
+                        return true;
+                return false;
+            }
+        }
+
+        private class IndexerThread : TimedThread
+        {
+            Directory dir1;
+            Directory dir2;
+            object lock_Renamed;
+            int nextID;
+
+            public IndexerThread(object lock_Renamed, Directory dir1, Directory dir2, TimedThread[] threads)
+                : base(threads)
+            {
+                this.lock_Renamed = lock_Renamed;
+                this.dir1 = dir1;
+                this.dir2 = dir2;
+            }
+
+            override public void DoWork()
+            {
+
+                IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+                writer1.SetMaxBufferedDocs(3);
+                writer1.SetMergeFactor(2);
+                ((ConcurrentMergeScheduler)writer1.GetMergeScheduler()).SetSuppressExceptions_ForNUnitTest();
+
+                IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+                // Intentionally use different params so flush/merge
+                // happen @ different times
+                writer2.SetMaxBufferedDocs(2);
+                writer2.SetMergeFactor(3);
+                ((ConcurrentMergeScheduler)writer2.GetMergeScheduler()).SetSuppressExceptions_ForNUnitTest();
+
+                Update(writer1);
+                Update(writer2);
+
+                TestTransactions.doFail = true;
+                try
+                {
+                    lock (lock_Renamed)
+                    {
+                        try
+                        {
+                            writer1.PrepareCommit();
+                        }
+                        catch (System.Exception)
+                        {
+                            writer1.Rollback();
+                            writer2.Rollback();
+                            return;
+                        }
+                        try
+                        {
+                            writer2.PrepareCommit();
+                        }
+                        catch (System.Exception)
+                        {
+                            writer1.Rollback();
+                            writer2.Rollback();
+                            return;
+                        }
+
+                        writer1.Commit();
+                        writer2.Commit();
+                    }
+                }
+                finally
+                {
+                    TestTransactions.doFail = false;
+                }
+
+                writer1.Close();
+                writer2.Close();
+            }
+
+            public void Update(IndexWriter writer)
+            {
+                // Add 10 docs:
+                for (int j = 0; j < 10; j++)
+                {
+                    Document d = new Document();
+                    int n = RANDOM.Next();
+                    d.Add(new Field("id", "" + nextID++, Field.Store.YES, Field.Index.NOT_ANALYZED));
+                    d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
+                    writer.AddDocument(d);
+                }
+
+                // Delete 5 docs:
+                int deleteID = nextID - 1;
+                for (int j = 0; j < 5; j++)
+                {
+                    writer.DeleteDocuments(new Term("id", "" + deleteID));
+                    deleteID -= 2;
+                }
+            }
+        }
+
+        private class SearcherThread : TimedThread
+        {
+            Directory dir1;
+            Directory dir2;
+            object lock_Renamed;
+
+            public SearcherThread(object lock_Renamed, Directory dir1, Directory dir2, TimedThread[] threads)
+                : base(threads)
+            {
+                this.lock_Renamed = lock_Renamed;
+                this.dir1 = dir1;
+                this.dir2 = dir2;
+            }
+
+            override public void DoWork()
+            {
+                IndexReader r1, r2;
+                lock (lock_Renamed)
+                {
+                    r1 = IndexReader.Open(dir1);
+                    r2 = IndexReader.Open(dir2);
+                }
+                if (r1.NumDocs() != r2.NumDocs())
+                    throw new System.Exception("doc counts differ: r1=" + r1.NumDocs() + " r2=" + r2.NumDocs());
+                r1.Close();
+                r2.Close();
+            }
+        }
+
+        public void InitIndex(Directory dir)
+        {
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            for (int j = 0; j < 7; j++)
+            {
+                Document d = new Document();
+                int n = RANDOM.Next();
+                d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
+                writer.AddDocument(d);
+            }
+            writer.Close();
+        }
+
+        [Test]
+        public void TestTransactions_Renamed()
+        {
+            MockRAMDirectory dir1 = new MockRAMDirectory();
+            MockRAMDirectory dir2 = new MockRAMDirectory();
+            dir1.SetPreventDoubleWrite(false);
+            dir2.SetPreventDoubleWrite(false);
+            dir1.FailOn(new RandomFailure());
+            dir2.FailOn(new RandomFailure());
+
+            InitIndex(dir1);
+            InitIndex(dir2);
+
+            TimedThread[] threads = new TimedThread[3];
+            int numThread = 0;
+
+            IndexerThread indexerThread = new IndexerThread(this, dir1, dir2, threads);
+            threads[numThread++] = indexerThread;
+            indexerThread.Start();
+
+            SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
+            threads[numThread++] = searcherThread1;
+            searcherThread1.Start();
+
+            SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
+            threads[numThread++] = searcherThread2;
+            searcherThread2.Start();
+
+            for (int i = 0; i < numThread; i++)
+                threads[i].Join();
+
+            for (int i = 0; i < numThread; i++)
+                Assert.IsTrue(!((TimedThread)threads[i]).failed);
+        }
+    }
+}
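
The new test drives PrepareCommit, Commit, and Rollback as a two-phase commit across two indexes: both writers must prepare successfully before either commits, and a failure during either prepare rolls both back. A condensed sketch of the protocol, using only the IndexWriter calls from the file above (CommitBoth is a hypothetical helper):

    // Two-phase commit over two writers: either both commit or neither does.
    static void CommitBoth(IndexWriter writer1, IndexWriter writer2)
    {
        try
        {
            writer1.PrepareCommit();   // phase 1: both prepares must succeed...
            writer2.PrepareCommit();
        }
        catch (System.Exception)
        {
            writer1.Rollback();        // ...otherwise undo both sides
            writer2.Rollback();
            return;
        }
        writer1.Commit();              // phase 2: make both visible
        writer2.Commit();
    }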

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.19.cfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.19.cfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.19.cfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.19.nocfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.19.nocfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.19.nocfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.20.cfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.20.cfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.20.cfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.20.nocfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.20.nocfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.20.nocfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.21.cfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.21.cfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.21.cfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.21.nocfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.21.nocfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.21.nocfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.22.cfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.22.cfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.22.cfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.22.nocfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.22.nocfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.22.nocfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.23.cfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.23.cfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.23.cfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.23.nocfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.23.nocfs.zip?rev=798995&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.23.nocfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: incubator/lucene.net/trunk/C#/src/Test/IndexTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/IndexTest.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/IndexTest.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/IndexTest.cs Wed Jul 29 18:04:12 2009
@@ -32,10 +32,11 @@
 			try
 			{
 				System.DateTime start = System.DateTime.Now;
-				System.String tempFileName = System.IO.Path.GetTempFileName();
+                System.String tempFileName = System.IO.Path.GetTempFileName();
 				System.String tempDirectory = System.IO.Path.GetDirectoryName(tempFileName);
 				tempFileName = System.IO.Path.GetFileName(tempFileName);
-				IndexWriter writer = new IndexWriter(System.IO.Path.Combine(tempDirectory, "luceneTest") + tempFileName + ".idx", new SimpleAnalyzer(), true);
+				IndexWriter writer = new IndexWriter(System.IO.Path.Combine(tempDirectory, "luceneTest") + tempFileName + ".idx",
+                    new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 
 				writer.SetMergeFactor(20);
 				

Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestMultiAnalyzer.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs Wed Jul 29 18:04:12 2009
@@ -34,10 +34,7 @@
 	/// <summary> Test QueryParser's ability to deal with Analyzers that return more
 	/// than one token per position or that return tokens with a position
 	/// increment &gt; 1.
-	/// 
 	/// </summary>
-	/// <author>  Daniel Naber
-	/// </author>
 	[TestFixture]
 	public class TestMultiAnalyzer : LuceneTestCase
 	{
@@ -178,35 +175,38 @@
 				InitBlock(enclosingInstance);
 			}
 			
-			public override Lucene.Net.Analysis.Token Next()
+			public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token reusableToken)
 			{
-				if (Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken > 0)
+				if (TestMultiAnalyzer.multiToken > 0)
 				{
-					Lucene.Net.Analysis.Token token = new Lucene.Net.Analysis.Token("multi" + (Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken + 1), prevToken.StartOffset(), prevToken.EndOffset(), prevToken.Type());
-					token.SetPositionIncrement(0);
-					Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken--;
-					return token;
+                    reusableToken.Reinit("multi" + (TestMultiAnalyzer.multiToken + 1), prevToken.StartOffset(), prevToken.EndOffset(), prevToken.Type());
+					reusableToken.SetPositionIncrement(0);
+					TestMultiAnalyzer.multiToken--;
+					return reusableToken;
 				}
 				else
 				{
-					Lucene.Net.Analysis.Token t = input.Next();
-					prevToken = t;
-					if (t == null)
-						return null;
-					System.String text = t.TermText();
+					Lucene.Net.Analysis.Token nextToken = input.Next(reusableToken);
+                    if (nextToken == null)
+                    {
+                        prevToken = null;
+                        return null;
+                    }
+                    prevToken = (Lucene.Net.Analysis.Token)(nextToken.Clone());
+					string text = nextToken.Term();
 					if (text.Equals("triplemulti"))
 					{
-						Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken = 2;
-						return t;
+						TestMultiAnalyzer.multiToken = 2;
+						return nextToken;
 					}
 					else if (text.Equals("multi"))
 					{
-						Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken = 1;
-						return t;
+						TestMultiAnalyzer.multiToken = 1;
+						return nextToken;
 					}
 					else
 					{
-						return t;
+						return nextToken;
 					}
 				}
 			}
@@ -266,25 +266,23 @@
 				InitBlock(enclosingInstance);
 			}
 			
-			public override Lucene.Net.Analysis.Token Next()
+			public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token reusableToken)
 			{
-				for (Lucene.Net.Analysis.Token t = input.Next(); t != null; t = input.Next())
+                for (Lucene.Net.Analysis.Token nextToken = input.Next(reusableToken); nextToken != null; nextToken = input.Next(reusableToken))
 				{
-					if (t.TermText().Equals("the"))
+					if (nextToken.Term().Equals("the"))
 					{
 						// stopword, do nothing
 					}
-					else if (t.TermText().Equals("quick"))
+					else if (nextToken.Term().Equals("quick"))
 					{
-						Lucene.Net.Analysis.Token token = new Lucene.Net.Analysis.Token(t.TermText(), t.StartOffset(), t.EndOffset(), t.Type());
-						token.SetPositionIncrement(2);
-						return token;
+						nextToken.SetPositionIncrement(2);
+						return nextToken;
 					}
 					else
 					{
-						Lucene.Net.Analysis.Token token = new Lucene.Net.Analysis.Token(t.TermText(), t.StartOffset(), t.EndOffset(), t.Type());
-						token.SetPositionIncrement(1);
-						return token;
+						nextToken.SetPositionIncrement(1);
+                        return nextToken;
 					}
 				}
 				return null;
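
The rewritten filter reuses the incoming token and only adjusts its position increment: 2 where the dropped stopword "the" preceded "quick", 1 otherwise (and 0 is used earlier in this file to stack synonym-style tokens on one position). A minimal sketch of a stopword-dropping filter in the same style, assuming the TokenFilter members used in this hunk (DropTheFilter is hypothetical):

    using Lucene.Net.Analysis;

    class DropTheFilter : TokenFilter
    {
        public DropTheFilter(TokenStream input) : base(input) { }

        public override Token Next(Token reusableToken)
        {
            int skipped = 0;
            for (Token nextToken = input.Next(reusableToken); nextToken != null;
                 nextToken = input.Next(reusableToken))
            {
                if (nextToken.Term().Equals("the"))
                {
                    skipped++;                        // swallow the stopword
                    continue;
                }
                // Carry the gap forward so phrase/slop queries still see it.
                nextToken.SetPositionIncrement(1 + skipped);
                return nextToken;
            }
            return null;
        }
    }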

Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestMultiFieldQueryParser.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs Wed Jul 29 18:04:12 2009
@@ -19,29 +19,26 @@
 
 using NUnit.Framework;
 
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using Directory = Lucene.Net.Store.Directory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using Token = Lucene.Net.Analysis.Token;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
 using BooleanClause = Lucene.Net.Search.BooleanClause;
-using BooleanQuery = Lucene.Net.Search.BooleanQuery;
-using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using Occur = Lucene.Net.Search.BooleanClause.Occur;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.QueryParsers
 {
 	
 	/// <summary> Tests QueryParser.</summary>
-	/// <author>  Daniel Naber
-	/// </author>
 	[TestFixture]
 	public class TestMultiFieldQueryParser : LuceneTestCase
 	{
@@ -121,6 +118,10 @@
 			q = mfqp.Parse("\"foo bar\"~4");
 			Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());
 			
+            // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field
+            q = mfqp.Parse("b:\"foo bar\"~4");
+            Assert.AreEqual("b:\"foo bar\"~4", q.ToString());
+
 			// make sure that terms which have a field are not touched:
 			q = mfqp.Parse("one f:two");
 			Assert.AreEqual("(b:one t:one) f:two", q.ToString());
@@ -318,9 +319,9 @@
 		{
 			Analyzer analyzer = new StandardAnalyzer();
 			Directory ramDir = new RAMDirectory();
-			IndexWriter iw = new IndexWriter(ramDir, analyzer, true);
+			IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.TOKENIZED));
+			doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED));
 			iw.AddDocument(doc);
 			iw.Close();
 			
@@ -328,8 +329,8 @@
 			mfqp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.Operator.AND);
 			Query q = mfqp.Parse("the footest");
 			IndexSearcher is_Renamed = new IndexSearcher(ramDir);
-			Hits hits = is_Renamed.Search(q);
-			Assert.AreEqual(1, hits.Length());
+			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length);
 			is_Renamed.Close();
 		}
 		
@@ -356,7 +357,7 @@
 			
 			private class EmptyTokenStream : TokenStream
 			{
-				public override Lucene.Net.Analysis.Token Next()
+				public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token reusableToken)
 				{
 					return null;
 				}
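
Note on the Hits removal here and in the files below: the deprecated Hits class gives way to
Search(Query, Filter, int), which returns a TopDocs whose scoreDocs array carries the matching
doc ids and scores. A minimal sketch of the new pattern, assuming an existing RAMDirectory named
ramDir and an analyzed field named "body" (both are placeholder names):

    // Sketch only: count matches via ScoreDoc[] instead of Hits.
    IndexSearcher searcher = new IndexSearcher(ramDir);
    Query q = new QueryParser("body", new StandardAnalyzer()).Parse("the footest");
    ScoreDoc[] hits = searcher.Search(q, null, 1000).scoreDocs; // top 1000 hits, no filter
    Assert.AreEqual(1, hits.Length);                            // Hits.Length() becomes array Length
    searcher.Close();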

Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestQueryParser.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs Wed Jul 29 18:04:12 2009
@@ -19,17 +19,35 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using KeywordAnalyzer = Lucene.Net.Analysis.KeywordAnalyzer;
+using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using StopAnalyzer = Lucene.Net.Analysis.StopAnalyzer;
+using StopFilter = Lucene.Net.Analysis.StopFilter;
+using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using DateField = Lucene.Net.Documents.DateField;
 using DateTools = Lucene.Net.Documents.DateTools;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using BooleanQuery = Lucene.Net.Search.BooleanQuery;
+using ConstantScoreRangeQuery = Lucene.Net.Search.ConstantScoreRangeQuery;
+using FuzzyQuery = Lucene.Net.Search.FuzzyQuery;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using MatchAllDocsQuery = Lucene.Net.Search.MatchAllDocsQuery;
+using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+using PrefixQuery = Lucene.Net.Search.PrefixQuery;
+using Query = Lucene.Net.Search.Query;
+using RangeQuery = Lucene.Net.Search.RangeQuery;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+using TermQuery = Lucene.Net.Search.TermQuery;
+using WildcardQuery = Lucene.Net.Search.WildcardQuery;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using Lucene.Net.Analysis;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
-using Lucene.Net.Search;
-using Searchable = Lucene.Net.Search.Searchable;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.QueryParsers
@@ -94,25 +112,26 @@
 			internal bool inPhrase = false;
 			internal int savedStart = 0, savedEnd = 0;
 			
-			public override Lucene.Net.Analysis.Token Next()
+			public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token reusableToken)
 			{
+                System.Diagnostics.Debug.Assert(reusableToken != null);
 				if (inPhrase)
 				{
 					inPhrase = false;
-					return new Lucene.Net.Analysis.Token("phrase2", savedStart, savedEnd);
+					return reusableToken.Reinit("phrase2", savedStart, savedEnd);
 				}
 				else
-					for (Lucene.Net.Analysis.Token token = input.Next(); token != null; token = input.Next())
+					for (Lucene.Net.Analysis.Token nextToken = input.Next(reusableToken); nextToken != null; nextToken = input.Next(reusableToken))
 					{
-						if (token.TermText().Equals("phrase"))
+						if (nextToken.Term().Equals("phrase"))
 						{
 							inPhrase = true;
-							savedStart = token.StartOffset();
-							savedEnd = token.EndOffset();
-							return new Lucene.Net.Analysis.Token("phrase1", savedStart, savedEnd);
+							savedStart = nextToken.StartOffset();
+							savedEnd = nextToken.EndOffset();
+							return nextToken.Reinit("phrase1", savedStart, savedEnd);
 						}
-						else if (!token.TermText().Equals("stop"))
-							return token;
+						else if (!nextToken.Term().Equals("stop"))
+							return nextToken;
 					}
 				return null;
 			}
@@ -477,7 +496,54 @@
 			AssertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
 			AssertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
 		}
-		
+
+        [Test]
+        public virtual void  TestFarsiRangeCollating()
+        {
+
+            RAMDirectory ramDir = new RAMDirectory();
+            IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true,
+                                             IndexWriter.MaxFieldLength.LIMITED);
+            Document doc = new Document();
+            doc.Add(new Field("content", "\u0633\u0627\u0628",
+                              Field.Store.YES, Field.Index.UN_TOKENIZED));
+            iw.AddDocument(doc);
+            iw.Close();
+            IndexSearcher is_Renamed = new IndexSearcher(ramDir);
+
+            QueryParser qp = new QueryParser("content", new WhitespaceAnalyzer());
+
+            // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
+            // RuleBasedCollator.  However, the Arabic Locale seems to order the Farsi
+            // characters properly.
+            System.Globalization.CompareInfo c = new System.Globalization.CultureInfo("ar").CompareInfo;
+            qp.SetRangeCollator(c);
+
+            // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
+            // orders the U+0698 character before the U+0633 character, so the single
+            // index Term below should NOT be returned by a ConstantScoreRangeQuery
+            // with a Farsi Collator (or an Arabic one for the case when Farsi is not
+            // supported).
+
+            // Test ConstantScoreRangeQuery
+            qp.SetUseOldRangeQuery(false);
+            ScoreDoc[] result = is_Renamed.Search(qp.Parse("[ \u062F TO \u0698 ]"), null, 1000).scoreDocs;
+            Assert.AreEqual(0, result.Length, "The index Term should not be included.");
+
+            result = is_Renamed.Search(qp.Parse("[ \u0633 TO \u0638 ]"), null, 1000).scoreDocs;
+            Assert.AreEqual(1, result.Length, "The index Term should be included.");
+
+            // Test RangeQuery
+            qp.SetUseOldRangeQuery(true);
+            result = is_Renamed.Search(qp.Parse("[ \u062F TO \u0698 ]"), null, 1000).scoreDocs;
+            Assert.AreEqual(0, result.Length, "The index Term should not be included.");
+
+            result = is_Renamed.Search(qp.Parse("[ \u0633 TO \u0638 ]"), null, 1000).scoreDocs;
+            Assert.AreEqual(1, result.Length, "The index Term should be included.");
+
+            is_Renamed.Close();
+        }
+  
 		/// <summary>for testing legacy DateField support </summary>
 		private System.String GetLegacyDate(System.String s)
 		{
@@ -674,6 +739,9 @@
 			AssertQueryEquals("\\\\", a, "\\"); // escaped backslash
 			
 			AssertParseException("\\"); // a backslash must always be escaped
+
+            // LUCENE-1189
+            AssertQueryEquals("(\"a\\\\\") or (\"b\")", a, "a\\ or b");
 		}
 		
 		[Test]
@@ -790,12 +858,12 @@
 		[Test]
 		public virtual void  TestException()
 		{
-			AssertParseException("\"some phrase");
-			AssertParseException("(foo bar");
-			AssertParseException("foo bar))");
-			AssertParseException("field:term:with:colon some more terms");
-			AssertParseException("(sub query)^5.0^2.0 plus more");
-			AssertParseException("secret AND illegal) AND access:confidential");
+            AssertParseException("\"some phrase");
+            AssertParseException("(foo bar");
+            AssertParseException("foo bar))");
+            AssertParseException("field:term:with:colon some more terms");
+            AssertParseException("(sub query)^5.0^2.0 plus more");
+            AssertParseException("secret AND illegal) AND access:confidential");
 		}
 		
 		[Test]
@@ -834,11 +902,11 @@
 			{
 				Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", new WhitespaceAnalyzer());
 				qp.Parse("one two three");
-				Assert.Fail("ParseException expected due to too many boolean clauses");
+				Assert.Fail("ParseException expected due to too many bool clauses");
 			}
 			catch (ParseException)
 			{
-				// too many boolean clauses, so ParseException is expected
+				// too many bool clauses, so ParseException is expected
 			}
 		}
 		
@@ -856,7 +924,7 @@
 		public virtual void  TestLocalDateFormat()
 		{
 			Lucene.Net.Store.RAMDirectory ramDir = new Lucene.Net.Store.RAMDirectory();
-			Lucene.Net.Index.IndexWriter iw = new Lucene.Net.Index.IndexWriter(ramDir, new WhitespaceAnalyzer(), true);
+			Lucene.Net.Index.IndexWriter iw = new Lucene.Net.Index.IndexWriter(ramDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			AddDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
 			AddDateDoc("b", 2005, 12, 4, 22, 15, 0, iw);
 			iw.Close();
@@ -975,16 +1043,16 @@
 			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("date", new WhitespaceAnalyzer());
 			qp.SetLocale(new System.Globalization.CultureInfo("en-US"));
 			Query q = qp.Parse(query);
-			Lucene.Net.Search.Hits hits = is_Renamed.Search(q);
-			Assert.AreEqual(expected, hits.Length());
+			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).scoreDocs;
+			Assert.AreEqual(expected, hits.Length);
 		}
 		
 		private static void  AddDateDoc(System.String content, int year, int month, int day, int hour, int minute, int second, Lucene.Net.Index.IndexWriter iw)
 		{
 			Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
-			d.Add(new Lucene.Net.Documents.Field("f", content, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.TOKENIZED));
+			d.Add(new Lucene.Net.Documents.Field("f", content, Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
 			System.DateTime tempAux = new System.DateTime(year, month, day, hour, minute, second);
-			d.Add(new Lucene.Net.Documents.Field("date", DateField.DateToString(tempAux), Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.UN_TOKENIZED));
+			d.Add(new Lucene.Net.Documents.Field("date", DateField.DateToString(tempAux), Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.NOT_ANALYZED));
 			iw.AddDocument(d);
 		}
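
The TokenStream hunks above move to the token-reuse contract: Next(Token reusableToken) lets a
stream refill a caller-supplied Token (via Reinit) instead of allocating a new one per term, and
Term() replaces TermText(). A minimal sketch of a filter written against that contract; the class
name and the skipped term are illustrative only:

    // Sketch only: pass tokens through, dropping the literal term "stop".
    class SkipStopFilter : TokenFilter
    {
        internal SkipStopFilter(TokenStream input) : base(input) { }
        public override Token Next(Token reusableToken)
        {
            for (Token t = input.Next(reusableToken); t != null; t = input.Next(reusableToken))
            {
                if (!t.Term().Equals("stop"))   // Term() is the new accessor for the term text
                    return t;                   // t is the caller's token, refilled in place
            }
            return null;                        // end of stream
        }
    }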
 		

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/BaseTestRangeFilter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/BaseTestRangeFilter.cs Wed Jul 29 18:04:12 2009
@@ -31,15 +31,32 @@
 	[TestFixture]
 	public class BaseTestRangeFilter : LuceneTestCase
 	{
-		
 		public const bool F = false;
 		public const bool T = true;
 		
-		internal RAMDirectory index = new RAMDirectory();
 		internal System.Random rand = new System.Random((System.Int32) 101); // use a fixed seed so the test is deterministic
 		
-		internal int maxR = System.Int32.MinValue;
-		internal int minR = System.Int32.MaxValue;
+        /// <summary>
+        /// Collation interacts badly with hyphens -- collation produces different ordering than Unicode code-point
+        /// ordering -- so two indexes are created: one which can't have negative random integers, for testing collated
+        /// ranges, and the other which can have negative random integers, for all other tests
+        /// </summary>
+        internal class TestIndex
+        {
+            internal int maxR = System.Int32.MinValue;
+            internal int minR = System.Int32.MaxValue;
+            internal bool allowNegativeRandomInts;
+            internal RAMDirectory index = new RAMDirectory();
+
+            internal TestIndex(int minR, int maxR, bool allowNegativeRandomInts)
+            {
+                this.minR = minR;
+                this.maxR = maxR;
+                this.allowNegativeRandomInts = allowNegativeRandomInts;
+            }
+        }
+        internal TestIndex signedIndex = new TestIndex(int.MaxValue, int.MinValue, true); // min/max start at opposite extremes; Build() tightens them
+        internal TestIndex unsignedIndex = new TestIndex(int.MaxValue, 0, false);
 		
 		internal int minId = 0;
 		internal int maxId = 10000;
@@ -69,36 +86,38 @@
 		
 		public BaseTestRangeFilter(System.String name)
 		{
-			Build();
+            Build(signedIndex);
+            Build(unsignedIndex);
 		}
 		public BaseTestRangeFilter()
 		{
-			Build();
-		}
+            Build(signedIndex);
+            Build(unsignedIndex);
+        }
 		
-		private void  Build()
+		private void  Build(TestIndex index)
 		{
 			try
 			{
 				
 				/* build an index */
-				IndexWriter writer = new IndexWriter(index, new SimpleAnalyzer(), T);
+				IndexWriter writer = new IndexWriter(index.index, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED);
 				
 				for (int d = minId; d <= maxId; d++)
 				{
 					Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-					doc.Add(new Field("id", Pad(d), Field.Store.YES, Field.Index.UN_TOKENIZED));
-					int r = rand.Next();
-					if (maxR < r)
+					doc.Add(new Field("id", Pad(d), Field.Store.YES, Field.Index.NOT_ANALYZED));
+					int r = index.allowNegativeRandomInts ? rand.Next(int.MinValue, int.MaxValue) : rand.Next(int.MaxValue); // .NET's Random.Next() is never negative, so ask for the full range explicitly
+					if (index.maxR < r)
 					{
-						maxR = r;
+						index.maxR = r;
 					}
-					if (r < minR)
+					if (r < index.minR)
 					{
-						minR = r;
+						index.minR = r;
 					}
-					doc.Add(new Field("rand", Pad(r), Field.Store.YES, Field.Index.UN_TOKENIZED));
-					doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.UN_TOKENIZED));
+					doc.Add(new Field("rand", Pad(r), Field.Store.YES, Field.Index.NOT_ANALYZED));
+					doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.NOT_ANALYZED));
 					writer.AddDocument(doc);
 				}
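
The signed/unsigned index split above matters because System.Random.Next() never returns a
negative value in .NET, unlike java.util.Random.nextInt(), so a signed draw has to request the
full range explicitly, as the Build hunk now does. A short sketch of the distinction:

    // Sketch only: full-range vs. non-negative random draws in .NET.
    System.Random rand = new System.Random(101);             // fixed seed keeps the test deterministic
    int signedDraw = rand.Next(int.MinValue, int.MaxValue);  // may be negative (upper bound exclusive)
    int unsignedDraw = rand.Next(int.MaxValue);              // always >= 0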
 				

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/CachingWrapperFilterHelper.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/CachingWrapperFilterHelper.cs Wed Jul 29 18:04:12 2009
@@ -40,7 +40,7 @@
 			this.shouldHaveCache = shouldHaveCache;
 		}
 		
-		public override System.Collections.BitArray Bits(IndexReader reader)
+		public override DocIdSet GetDocIdSet(IndexReader reader)
 		{
 			if (cache == null)
 			{
@@ -50,7 +50,7 @@
 			lock (cache.SyncRoot)
 			{
 				// check cache
-				System.Collections.BitArray cached = (System.Collections.BitArray) cache[reader];
+				DocIdSet cached = (DocIdSet) cache[reader];
 				if (shouldHaveCache)
 				{
 					Assert.IsNotNull(cached, "Cache should have data ");
@@ -66,7 +66,7 @@
 				}
 			}
 			
-			System.Collections.BitArray bits = filter.Bits(reader);
+			DocIdSet bits = filter.GetDocIdSet(reader);
 			
 			lock (cache.SyncRoot)
 			{
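
The filter hunks above replace Bits(IndexReader), which returned a raw BitArray, with
GetDocIdSet(IndexReader), which returns a DocIdSet; Lucene.Net.Util.DocIdBitSet adapts a BitArray
to the new contract. A minimal sketch of a filter on the new API, mirroring the MockFilter shape
further below (the class name and the even-document rule are made up for illustration):

    // Sketch only: a Filter that admits every even-numbered document.
    [Serializable]
    public class EvenDocsFilter : Filter
    {
        [System.Obsolete()]
        public override System.Collections.BitArray Bits(IndexReader reader)
        {
            return null;                                    // deprecated path, unused
        }

        public override DocIdSet GetDocIdSet(IndexReader reader)
        {
            System.Collections.BitArray bits = new System.Collections.BitArray(reader.MaxDoc());
            for (int i = 0; i < bits.Length; i += 2)
                bits.Set(i, true);                          // accept even doc ids
            return new Lucene.Net.Util.DocIdBitSet(bits);   // wrap the BitArray as a DocIdSet
        }
    }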

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/CheckHits.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/CheckHits.cs Wed Jul 29 18:04:12 2009
@@ -145,7 +145,7 @@
 				QueryUtils.Check(query, (IndexSearcher) searcher);
 			}
 			
-			Hits hits = searcher.Search(query);
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			
 			System.Collections.ArrayList correct = new System.Collections.ArrayList(results.Length);
 			for (int i = 0; i < results.Length; i++)
@@ -153,10 +153,10 @@
 				correct.Add(results[i]);
 			}
 
-			System.Collections.ArrayList actual = new System.Collections.ArrayList(hits.Length());
-			for (int i = 0; i < hits.Length(); i++)
+			System.Collections.ArrayList actual = new System.Collections.ArrayList(hits.Length);
+			for (int i = 0; i < hits.Length; i++)
 			{
-				actual.Add(hits.Id(i));
+				actual.Add(hits[i].doc);
 			}
 			
 			Assert.AreEqual(correct.Count, actual.Count);
@@ -171,52 +171,52 @@
 		}
 		
 		/// <summary>Tests that a Hits has an expected order of documents </summary>
-		public static void  CheckDocIds(System.String mes, int[] results, Hits hits)
+		public static void  CheckDocIds(System.String mes, int[] results, ScoreDoc[] hits)
 		{
-			Assert.AreEqual(results.Length, hits.Length(), mes + " nr of hits");
+			Assert.AreEqual(results.Length, hits.Length, mes + " nr of hits");
 			for (int i = 0; i < results.Length; i++)
 			{
-				Assert.AreEqual(results[i], hits.Id(i), mes + " doc nrs for hit " + i);
+				Assert.AreEqual(results[i], hits[i].doc, mes + " doc nrs for hit " + i);
 			}
 		}
 		
 		/// <summary>Tests that two queries have an expected order of documents,
 		/// and that the two queries have the same score values.
 		/// </summary>
-		public static void  CheckHitsQuery(Query query, Hits hits1, Hits hits2, int[] results)
+        public static void CheckHitsQuery(Query query, ScoreDoc[] hits1, ScoreDoc[] hits2, int[] results)
 		{
 			
 			CheckDocIds("hits1", results, hits1);
 			CheckDocIds("hits2", results, hits2);
 			CheckEqual(query, hits1, hits2);
 		}
-		
-		public static void  CheckEqual(Query query, Hits hits1, Hits hits2)
+
+        public static void CheckEqual(Query query, ScoreDoc[] hits1, ScoreDoc[] hits2)
 		{
 			float scoreTolerance = 1.0e-6f;
-			if (hits1.Length() != hits2.Length())
+			if (hits1.Length != hits2.Length)
 			{
-				Assert.Fail("Unequal lengths: hits1=" + hits1.Length() + ",hits2=" + hits2.Length());
+				Assert.Fail("Unequal lengths: hits1=" + hits1.Length + ",hits2=" + hits2.Length);
 			}
-			for (int i = 0; i < hits1.Length(); i++)
+			for (int i = 0; i < hits1.Length; i++)
 			{
-				if (hits1.Id(i) != hits2.Id(i))
+                if (hits1[i].doc != hits2[i].doc)
 				{
 					Assert.Fail("Hit " + i + " docnumbers don't match\n" + Hits2str(hits1, hits2, 0, 0) + "for query:" + query.ToString());
 				}
-				
-				if ((hits1.Id(i) != hits2.Id(i)) || System.Math.Abs(hits1.Score(i) - hits2.Score(i)) > scoreTolerance)
+
+                if ((hits1[i].doc != hits2[i].doc) || System.Math.Abs(hits1[i].score - hits2[i].score) > scoreTolerance)
 				{
-					Assert.Fail("Hit " + i + ", doc nrs " + hits1.Id(i) + " and " + hits2.Id(i) + "\nunequal       : " + hits1.Score(i) + "\n           and: " + hits2.Score(i) + "\nfor query:" + query.ToString());
+					Assert.Fail("Hit " + i + ", doc nrs " + hits1[i].doc + " and " + hits2[i].doc + "\nunequal       : " + hits1[i].score + "\n           and: " + hits2[i].score + "\nfor query:" + query.ToString());
 				}
 			}
 		}
-		
-		public static System.String Hits2str(Hits hits1, Hits hits2, int start, int end)
+
+        public static System.String Hits2str(ScoreDoc[] hits1, ScoreDoc[] hits2, int start, int end)
 		{
 			System.Text.StringBuilder sb = new System.Text.StringBuilder();
-			int len1 = hits1 == null?0:hits1.Length();
-			int len2 = hits2 == null?0:hits2.Length();
+			int len1 = hits1 == null?0:hits1.Length;
+			int len2 = hits2 == null?0:hits2.Length;
 			if (end <= 0)
 			{
 				end = System.Math.Max(len1, len2);
@@ -230,7 +230,7 @@
 				sb.Append("hit=").Append(i).Append(':');
 				if (i < len1)
 				{
-					sb.Append(" doc").Append(hits1.Id(i)).Append('=').Append(hits1.Score(i));
+					sb.Append(" doc").Append(hits1[i].doc).Append('=').Append(hits1[i].score);
 				}
 				else
 				{
@@ -239,7 +239,7 @@
 				sb.Append(",\t");
 				if (i < len2)
 				{
-					sb.Append(" doc").Append(hits2.Id(i)).Append('=').Append(hits2.Score(i));
+					sb.Append(" doc").Append(hits2[i].doc).Append('=').Append(hits2[i].score);
 				}
 				sb.Append('\n');
 			}
@@ -274,7 +274,7 @@
 		/// </summary>
 		/// <seealso cref="ExplanationAsserter">
 		/// </seealso>
-		/// <seealso cref="CheckExplanations(Query, String, Searcher, boolean) for a">
+		/// <seealso cref="CheckExplanations(Query, String, Searcher, bool)"> for a
 		/// "deep" testing of the explanation details.
 		/// 
 		/// </seealso>
@@ -429,21 +429,6 @@
 			{
 				base.Search(q, null, new ExplanationAsserter(q, null, this));
 			}
-			public override Hits Search(Query query, Filter filter)
-			{
-				CheckExplanations(query);
-				return base.Search(query, filter);
-			}
-			public override Hits Search(Query query, Sort sort)
-			{
-				CheckExplanations(query);
-				return base.Search(query, sort);
-			}
-			public override Hits Search(Query query, Filter filter, Sort sort)
-			{
-				CheckExplanations(query);
-				return base.Search(query, filter, sort);
-			}
 			public override TopFieldDocs Search(Query query, Filter filter, int n, Sort sort)
 			{
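
CheckEqual now walks two ScoreDoc arrays directly, comparing doc numbers exactly and scores only
within a small tolerance, since floating-point scores can differ slightly between otherwise
equivalent query plans. A condensed sketch of that comparison, assuming hits1 and hits2 came from
the same query:

    // Sketch only: same doc order required, scores equal within 1e-6.
    Assert.AreEqual(hits1.Length, hits2.Length);
    for (int i = 0; i < hits1.Length; i++)
    {
        Assert.AreEqual(hits1[i].doc, hits2[i].doc);
        Assert.IsTrue(System.Math.Abs(hits1[i].score - hits2[i].score) <= 1.0e-6f);
    }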
 				

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Function/FunctionTestSetup.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs Wed Jul 29 18:04:12 2009
@@ -79,7 +79,7 @@
 			// prepare a small index with just a few documents.  
 			dir = new RAMDirectory();
 			anlzr = new StandardAnalyzer();
-			IndexWriter iw = new IndexWriter(dir, anlzr);
+			IndexWriter iw = new IndexWriter(dir, anlzr, IndexWriter.MaxFieldLength.LIMITED);
 			// add docs not exactly in natural ID order, to verify we do check the order of docs by scores
 			int remaining = N_DOCS;
 			bool[] done = new bool[N_DOCS];
@@ -104,19 +104,19 @@
 			Fieldable f;
 			int scoreAndID = i + 1;
 			
-			f = new Field(ID_FIELD, Id2String(scoreAndID), Field.Store.YES, Field.Index.UN_TOKENIZED); // for debug purposes
+			f = new Field(ID_FIELD, Id2String(scoreAndID), Field.Store.YES, Field.Index.NOT_ANALYZED); // for debug purposes
 			f.SetOmitNorms(true);
 			d.Add(f);
 			
-			f = new Field(TEXT_FIELD, "text of doc" + scoreAndID + TextLine(i), Field.Store.NO, Field.Index.TOKENIZED); // for regular search
+			f = new Field(TEXT_FIELD, "text of doc" + scoreAndID + TextLine(i), Field.Store.NO, Field.Index.ANALYZED); // for regular search
 			f.SetOmitNorms(true);
 			d.Add(f);
 			
-			f = new Field(INT_FIELD, "" + scoreAndID, Field.Store.NO, Field.Index.UN_TOKENIZED); // for function scoring
+			f = new Field(INT_FIELD, "" + scoreAndID, Field.Store.NO, Field.Index.NOT_ANALYZED); // for function scoring
 			f.SetOmitNorms(true);
 			d.Add(f);
 			
-			f = new Field(FLOAT_FIELD, scoreAndID + ".000", Field.Store.NO, Field.Index.UN_TOKENIZED); // for function scoring
+			f = new Field(FLOAT_FIELD, scoreAndID + ".000", Field.Store.NO, Field.Index.NOT_ANALYZED); // for function scoring
 			f.SetOmitNorms(true);
 			d.Add(f);
 			
@@ -155,7 +155,7 @@
 		}
 
 		[Test]
-		public virtual void  TestDummy()
+		public override void TestDummy()
 		{
             // So that NUnit doesn't complain
 		}
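
The setup hunks above track two renames from this API generation: IndexWriter constructors take an
explicit MaxFieldLength, and the Field.Index members TOKENIZED/UN_TOKENIZED become
ANALYZED/NOT_ANALYZED. A minimal sketch of indexing one document under the new names, assuming an
existing Directory dir and Analyzer anlzr (the field names are placeholders):

    // Sketch only: build one document with the renamed Field.Index members.
    IndexWriter iw = new IndexWriter(dir, anlzr, IndexWriter.MaxFieldLength.LIMITED);
    Document d = new Document();
    Field id = new Field("id", "ID001", Field.Store.YES, Field.Index.NOT_ANALYZED); // was UN_TOKENIZED
    id.SetOmitNorms(true);                  // single-token ids need no length normalization
    d.Add(id);
    d.Add(new Field("text", "first aid text", Field.Store.NO, Field.Index.ANALYZED)); // was TOKENIZED
    iw.AddDocument(d);
    iw.Close();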

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestCustomScoreQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Function/TestCustomScoreQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestCustomScoreQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestCustomScoreQuery.cs Wed Jul 29 18:04:12 2009
@@ -182,7 +182,7 @@
 			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser(TEXT_FIELD, anlzr);
 			System.String qtxt = "first aid text"; // from the doc texts in FunctionQuerySetup.
 			
-			// regular (boolean) query.
+			// regular (bool) query.
 			Query q1 = qp.Parse(qtxt);
 			Log(q1);
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestFieldScoreQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Function/TestFieldScoreQuery.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestFieldScoreQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestFieldScoreQuery.cs Wed Jul 29 18:04:12 2009
@@ -20,7 +20,6 @@
 using NUnit.Framework;
 
 using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
 using QueryUtils = Lucene.Net.Search.QueryUtils;
@@ -104,14 +103,14 @@
 			Query q = new FieldScoreQuery(field, tp);
 			Log("test: " + q);
 			QueryUtils.Check(q, s);
-			Hits h = s.Search(q);
-			Assert.AreEqual(N_DOCS, h.Length(), "All docs should be matched!");
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
+			Assert.AreEqual(N_DOCS, h.Length, "All docs should be matched!");
 			System.String prevID = "ID" + (N_DOCS + 1); // greater than all ids of docs in this test
-			for (int i = 0; i < h.Length(); i++)
+			for (int i = 0; i < h.Length; i++)
 			{
-				System.String resID = h.Doc(i).Get(ID_FIELD);
-				Log(i + ".   score=" + h.Score(i) + "  -  " + resID);
-				Log(s.Explain(q, h.Id(i)));
+				System.String resID = s.Doc(h[i].doc).Get(ID_FIELD);
+				Log(i + ".   score=" + h[i].score + "  -  " + resID);
+				Log(s.Explain(q, h[i].doc));
 				Assert.IsTrue(String.CompareOrdinal(resID, prevID) < 0, "res id " + resID + " should be < prev res id " + prevID);
 				prevID = resID;
 			}
@@ -218,8 +217,8 @@
 			for (int i = 0; i < 10; i++)
 			{
 				FieldScoreQuery q = new FieldScoreQuery(field, tp);
-				Hits h = s.Search(q);
-				Assert.AreEqual(N_DOCS, h.Length(), "All docs should be matched!");
+				ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
+				Assert.AreEqual(N_DOCS, h.Length, "All docs should be matched!");
 				try
 				{
 					if (i == 0)
@@ -247,8 +246,8 @@
 			// verify new values are reloaded (not reused) for a new reader
 			s = new IndexSearcher(dir);
 			FieldScoreQuery q2 = new FieldScoreQuery(field, tp);
-			Hits h2 = s.Search(q2);
-			Assert.AreEqual(N_DOCS, h2.Length(), "All docs should be matched!");
+			ScoreDoc[] h2 = s.Search(q2, null, 1000).scoreDocs;
+			Assert.AreEqual(N_DOCS, h2.Length, "All docs should be matched!");
 			try
 			{
 				Log("compare: " + innerArray + " to " + q2.ValSrc_ForNUnitTest.GetValues(s.GetIndexReader()).GetInnerArray());
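
With Hits gone, a ScoreDoc carries only the internal doc id and the score, so stored fields are now
fetched through the searcher, as the loop above does. A short sketch of that retrieval pattern,
assuming a searcher s, a query q, and the test's ID_FIELD constant and Log helper:

    // Sketch only: resolve each hit's stored fields via the searcher.
    ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
    for (int i = 0; i < h.Length; i++)
    {
        Document d = s.Doc(h[i].doc);           // was h.Doc(i) on Hits
        System.String resID = d.Get(ID_FIELD);  // read the stored id field
        Log(i + ".   score=" + h[i].score + "  -  " + resID);
    }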

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestOrdValues.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/Function/TestOrdValues.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestOrdValues.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/Function/TestOrdValues.cs Wed Jul 29 18:04:12 2009
@@ -20,7 +20,6 @@
 using NUnit.Framework;
 
 using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
 using QueryUtils = Lucene.Net.Search.QueryUtils;
@@ -95,15 +94,15 @@
 			Query q = new ValueSourceQuery(vs);
 			Log("test: " + q);
 			QueryUtils.Check(q, s);
-			Hits h = s.Search(q);
-			Assert.AreEqual(N_DOCS, h.Length(), "All docs should be matched!");
+			ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
+			Assert.AreEqual(N_DOCS, h.Length, "All docs should be matched!");
 			System.String prevID = inOrder?"IE":"IC"; // smaller than all ids of docs in this test ("ID0001", etc.)
 			
-			for (int i = 0; i < h.Length(); i++)
+			for (int i = 0; i < h.Length; i++)
 			{
-				System.String resID = h.Doc(i).Get(ID_FIELD);
-				Log(i + ".   score=" + h.Score(i) + "  -  " + resID);
-				Log(s.Explain(q, h.Id(i)));
+				System.String resID = s.Doc(h[i].doc).Get(ID_FIELD);
+				Log(i + ".   score=" + h[i].score + "  -  " + resID);
+				Log(s.Explain(q, h[i].doc));
 				if (inOrder)
 				{
 					Assert.IsTrue(String.CompareOrdinal(resID, prevID) < 0, "res id " + resID + " should be < prev res id " + prevID);
@@ -195,10 +194,10 @@
 					vs = new ReverseOrdFieldSource(field);
 				}
 				ValueSourceQuery q = new ValueSourceQuery(vs);
-				Hits h = s.Search(q);
-				try
+                ScoreDoc[] h = s.Search(q, null, 1000).scoreDocs;
+                try
 				{
-					Assert.AreEqual(N_DOCS, h.Length(), "All docs should be matched!");
+					Assert.AreEqual(N_DOCS, h.Length, "All docs should be matched!");
 					if (i == 0)
 					{
 						innerArray = q.ValSrc_ForNUnitTest.GetValues(s.GetIndexReader()).GetInnerArray();
@@ -221,7 +220,7 @@
 			
 			ValueSource vs2;
 			ValueSourceQuery q2;
-			Hits h2;
+			ScoreDoc[] h2;
 			
 			// verify that different values are loaded for a different field
 			System.String field2 = INT_FIELD;
@@ -235,8 +234,8 @@
 				vs2 = new ReverseOrdFieldSource(field2);
 			}
 			q2 = new ValueSourceQuery(vs2);
-			h2 = s.Search(q2);
-			Assert.AreEqual(N_DOCS, h2.Length(), "All docs should be matched!");
+			h2 = s.Search(q2, null, 1000).scoreDocs;
+			Assert.AreEqual(N_DOCS, h2.Length, "All docs should be matched!");
 			try
 			{
 				Log("compare (should differ): " + innerArray + " to " + q2.ValSrc_ForNUnitTest.GetValues(s.GetIndexReader()).GetInnerArray());
@@ -262,8 +261,8 @@
 				vs2 = new ReverseOrdFieldSource(field);
 			}
 			q2 = new ValueSourceQuery(vs2);
-			h2 = s.Search(q2);
-			Assert.AreEqual(N_DOCS, h2.Length(), "All docs should be matched!");
+			h2 = s.Search(q2, null, 1000).scoreDocs;
+			Assert.AreEqual(N_DOCS, h2.Length, "All docs should be matched!");
 			try
 			{
 				Log("compare (should differ): " + innerArray + " to " + q2.ValSrc_ForNUnitTest.GetValues(s.GetIndexReader()).GetInnerArray());

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/MockFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/MockFilter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/MockFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/MockFilter.cs Wed Jul 29 18:04:12 2009
@@ -18,19 +18,25 @@
 using System;
 
 using IndexReader = Lucene.Net.Index.IndexReader;
+using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
 
 namespace Lucene.Net.Search
 {
-	
 	[Serializable]
 	public class MockFilter : Filter
 	{
 		private bool wasCalled;
-		
-		public override System.Collections.BitArray Bits(IndexReader reader)
+
+        [System.Obsolete()]
+        public override System.Collections.BitArray Bits(IndexReader reader)
+        {
+            return null;
+        }
+
+		public override DocIdSet GetDocIdSet(IndexReader reader)
 		{
 			wasCalled = true;
-			return new System.Collections.BitArray(64);
+			return new DocIdBitSet(new System.Collections.BitArray(64));
 		}
 		
 		public virtual void  Clear()