Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC

svn commit: r832486 [14/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestPositionBasedTermVectorMapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestPositionBasedTermVectorMapper.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestPositionBasedTermVectorMapper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestPositionBasedTermVectorMapper.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -24,8 +24,8 @@
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestPositionBasedTermVectorMapper : LuceneTestCase
+    [TestFixture]
+	public class TestPositionBasedTermVectorMapper:LuceneTestCase
 	{
 		protected internal System.String[] tokens;
 		protected internal int[][] thePositions;
@@ -33,9 +33,9 @@
 		protected internal int numPositions;
 		
 		
-		//public TestPositionBasedTermVectorMapper(System.String s):base(s)
-		//{
-		//}
+		public TestPositionBasedTermVectorMapper(System.String s):base(s)
+		{
+		}
 		
 		[SetUp]
 		public override void  SetUp()
@@ -85,32 +85,27 @@
 			
 			Assert.IsTrue(positions.Count == numPositions, "thePositions Size: " + positions.Count + " is not: " + numPositions);
 			System.Collections.BitArray bits = new System.Collections.BitArray((numPositions % 64 == 0?numPositions / 64:numPositions / 64 + 1) * 64);
-			for (System.Collections.IEnumerator iterator = new System.Collections.Hashtable(positions).GetEnumerator(); iterator.MoveNext(); )
+			for (System.Collections.IEnumerator iterator = positions.GetEnumerator(); iterator.MoveNext(); )
 			{
 				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iterator.Current;
 				PositionBasedTermVectorMapper.TVPositionInfo info = (PositionBasedTermVectorMapper.TVPositionInfo) entry.Value;
 				Assert.IsTrue(info != null, "info is null and it shouldn't be");
 				int pos = ((System.Int32) entry.Key);
 				bits.Set(pos, true);
-				Assert.IsTrue(info.GetPosition() == pos, info.GetPosition() + " does not equal: " + pos);
-				Assert.IsTrue(info.GetOffsets() != null, "info.getOffsets() is null and it shouldn't be");
+				Assert.IsTrue(info.Position == pos, info.Position + " does not equal: " + pos);
+				Assert.IsTrue(info.Offsets != null, "info.getOffsets() is null and it shouldn't be");
 				if (pos == 0)
 				{
-					Assert.IsTrue(info.GetTerms().Count == 2, "info.getTerms() Size: " + info.GetTerms().Count + " is not: " + 2); //need a test for multiple terms at one pos
-					Assert.IsTrue(info.GetOffsets().Count == 2, "info.getOffsets() Size: " + info.GetOffsets().Count + " is not: " + 2);
+					Assert.IsTrue(info.Terms.Count == 2, "info.getTerms() Size: " + info.Terms.Count + " is not: " + 2); //need a test for multiple terms at one pos
+					Assert.IsTrue(info.Offsets.Count == 2, "info.getOffsets() Size: " + info.Offsets.Count + " is not: " + 2);
 				}
 				else
 				{
-					Assert.IsTrue(info.GetTerms().Count == 1, "info.getTerms() Size: " + info.GetTerms().Count + " is not: " + 1); //need a test for multiple terms at one pos
-					Assert.IsTrue(info.GetOffsets().Count == 1, "info.getOffsets() Size: " + info.GetOffsets().Count + " is not: " + 1);
+					Assert.IsTrue(info.Terms.Count == 1, "info.getTerms() Size: " + info.Terms.Count + " is not: " + 1); //need a test for multiple terms at one pos
+					Assert.IsTrue(info.Offsets.Count == 1, "info.getOffsets() Size: " + info.Offsets.Count + " is not: " + 1);
 				}
 			}
-			int cardinality = 0;
-			for (int i = 0; i < bits.Count; i++)
-			{
-				if (bits.Get(i)) cardinality++;
-			}
-			Assert.IsTrue(cardinality == numPositions, "Bits are not all on");
+			Assert.IsTrue(SupportClass.BitSetSupport.Cardinality(bits) == numPositions, "Bits are not all on");
 		}
-	}
+    }
 }
\ No newline at end of file

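The cardinality loop removed in the hunk above is folded into a support helper. For reference, here is a minimal sketch of what such a BitArray cardinality helper can look like, assuming SupportClass.BitSetSupport simply counts set bits the way the inlined loop did (the class name below is hypothetical):

    using System.Collections;

    public static class BitSetSupportSketch
    {
        // Counts the bits set to true, mirroring java.util.BitSet.cardinality().
        public static int Cardinality(BitArray bits)
        {
            int count = 0;
            for (int i = 0; i < bits.Count; i++)
            {
                if (bits.Get(i))
                    count++;
            }
            return count;
        }
    }
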
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentMerger.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -27,8 +27,8 @@
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestSegmentMerger : LuceneTestCase
+    [TestFixture]
+	public class TestSegmentMerger:LuceneTestCase
 	{
 		//The variables for the new merged segment
 		private Directory mergedDir = new RAMDirectory();
@@ -41,32 +41,16 @@
 		private Directory merge2Dir = new RAMDirectory();
 		private Document doc2 = new Document();
 		private SegmentReader reader2 = null;
-
-		// This is needed for the test to pass and mimic what happens with JUnit
-		// For some reason, JUnit is creating a new member variable for each sub-test
-		// but NUnit is not -- who is wrong/right, I don't know.
-		private void SetUpInternal()        // {{Aroush-1.9}} See note above
+		
+		
+		public TestSegmentMerger(System.String s):base(s)
 		{
-			//The variables for the new merged segment
-			mergedDir = new RAMDirectory();
-			mergedSegment = "test";
-			//First segment to be merged
-			merge1Dir = new RAMDirectory();
-			doc1 = new Lucene.Net.Documents.Document();
-			//merge1Segment = "test-1";
-			reader1 = null;
-			//Second Segment to be merged
-			merge2Dir = new RAMDirectory();
-			doc2 = new Lucene.Net.Documents.Document();
-			//merge2Segment = "test-2";
-			reader2 = null;
 		}
-
+		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
-			SetUpInternal();
 			DocHelper.SetupDoc(doc1);
 			SegmentInfo info1 = DocHelper.WriteDoc(merge1Dir, doc1);
 			DocHelper.SetupDoc(doc2);
@@ -98,11 +82,11 @@
 			SegmentReader mergedReader = SegmentReader.Get(new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true));
 			Assert.IsTrue(mergedReader != null);
 			Assert.IsTrue(mergedReader.NumDocs() == 2);
-			Lucene.Net.Documents.Document newDoc1 = mergedReader.Document(0);
+			Document newDoc1 = mergedReader.Document(0);
 			Assert.IsTrue(newDoc1 != null);
 			//There are 2 unstored fields on the document
 			Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - DocHelper.unstored.Count);
-			Lucene.Net.Documents.Document newDoc2 = mergedReader.Document(1);
+			Document newDoc2 = mergedReader.Document(1);
 			Assert.IsTrue(newDoc2 != null);
 			Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.unstored.Count);
 			
@@ -110,7 +94,7 @@
 			Assert.IsTrue(termDocs != null);
 			Assert.IsTrue(termDocs.Next() == true);
 			
-			System.Collections.Generic.ICollection<string> stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
+			System.Collections.ICollection stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
 			Assert.IsTrue(stored != null);
 			//System.out.println("stored size: " + stored.size());
 			Assert.IsTrue(stored.Count == 4, "We do not have 4 fields that were indexed with term vector");

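The SetUpInternal() workaround deleted above existed because JUnit 3 constructs a fresh test-class instance for every test method, while NUnit reuses a single fixture instance, so C# field initializers run only once. A minimal NUnit sketch of the safe pattern (all names hypothetical): reset mutable members in [SetUp] rather than in field initializers.

    using NUnit.Framework;

    [TestFixture]
    public class FixtureLifecycleSketch
    {
        private System.Collections.ArrayList items;   // one instance shared across tests

        [SetUp]
        public void SetUp()
        {
            items = new System.Collections.ArrayList();   // re-initialize before each test
        }

        [Test]
        public void TestA()
        {
            items.Add("a");
            Assert.AreEqual(1, items.Count);   // holds only because SetUp reset the list
        }

        [Test]
        public void TestB()
        {
            items.Add("b");
            Assert.AreEqual(1, items.Count);
        }
    }
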
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentReader.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -21,54 +21,34 @@
 
 using Document = Lucene.Net.Documents.Document;
 using Fieldable = Lucene.Net.Documents.Fieldable;
-using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
-using Similarity = Lucene.Net.Search.Similarity;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestSegmentReader : LuceneTestCase
+    [TestFixture]
+	public class TestSegmentReader:LuceneTestCase
 	{
 		private RAMDirectory dir = new RAMDirectory();
-		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
+		private Document testDoc = new Document();
 		private SegmentReader reader = null;
 		
-		// public TestSegmentReader(System.String s)
-		// {
-		// }
-		
-		// This is needed for the test to pass and mimic what happens with JUnit
-		// For some reason, JUnit is creating a new member variable for each sub-test
-		// but NUnit is not -- who is wrong/right, I don't know.
-		private void SetUpInternal()        // {{Aroush-1.9}} See note above
+		public TestSegmentReader(System.String s):base(s)
 		{
-			dir = new RAMDirectory();
-			testDoc = new Lucene.Net.Documents.Document();
-			reader = null;
 		}
-
+		
 		//TODO: Setup the reader w/ multiple documents
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
-			SetUpInternal();
 			DocHelper.SetupDoc(testDoc);
 			SegmentInfo info = DocHelper.WriteDoc(dir, testDoc);
 			reader = SegmentReader.Get(info);
 		}
 		
-		[TearDown]
-		public override void TearDown()
-		{
-			
-		}
-		
 		[Test]
 		public virtual void  Test()
 		{
@@ -83,14 +63,15 @@
 		{
 			Assert.IsTrue(reader.NumDocs() == 1);
 			Assert.IsTrue(reader.MaxDoc() >= 1);
-			Lucene.Net.Documents.Document result = reader.Document(0);
+			Document result = reader.Document(0);
 			Assert.IsTrue(result != null);
 			//There are 2 unstored fields on the document that are not preserved across writing
 			Assert.IsTrue(DocHelper.NumFields(result) == DocHelper.NumFields(testDoc) - DocHelper.unstored.Count);
-			System.Collections.IEnumerator e = result.Fields();
-			while (e.MoveNext())
+			
+			System.Collections.IList fields = result.GetFields();
+			for (System.Collections.IEnumerator iter = fields.GetEnumerator(); iter.MoveNext(); )
 			{
-				Lucene.Net.Documents.Field field = (Lucene.Net.Documents.Field) e.Current;
+				Fieldable field = (Fieldable) iter.Current;
 				Assert.IsTrue(field != null);
 				Assert.IsTrue(DocHelper.nameValues.Contains(field.Name()));
 			}
@@ -99,7 +80,7 @@
 		[Test]
 		public virtual void  TestDelete()
 		{
-			Lucene.Net.Documents.Document docToDelete = new Lucene.Net.Documents.Document();
+			Document docToDelete = new Document();
 			DocHelper.SetupDoc(docToDelete);
 			SegmentInfo info = DocHelper.WriteDoc(dir, docToDelete);
 			SegmentReader deleteReader = SegmentReader.Get(info);
@@ -109,42 +90,32 @@
 			Assert.IsTrue(deleteReader.IsDeleted(0) == true);
 			Assert.IsTrue(deleteReader.HasDeletions() == true);
 			Assert.IsTrue(deleteReader.NumDocs() == 0);
-			try
-			{
-				deleteReader.Document(0);
-				Assert.Fail();
-			}
-			catch (System.ArgumentException)
-			{
-				// expected exception
-			}
 		}
 		
 		[Test]
 		public virtual void  TestGetFieldNameVariations()
 		{
-			System.Collections.Generic.ICollection<string> result = reader.GetFieldNames(IndexReader.FieldOption.ALL);
+			System.Collections.ICollection result = reader.GetFieldNames(IndexReader.FieldOption.ALL);
 			Assert.IsTrue(result != null);
 			Assert.IsTrue(result.Count == DocHelper.all.Count);
 			for (System.Collections.IEnumerator iter = result.GetEnumerator(); iter.MoveNext(); )
 			{
-                System.String s = (System.String)iter.Current;
-                Assert.IsTrue(DocHelper.nameValues.Contains(s) == true || s.Equals(""));
+				System.String s = (System.String) iter.Current;
+				//System.out.println("Name: " + s);
+				Assert.IsTrue(DocHelper.nameValues.Contains(s) == true || s.Equals(""));
 			}
-
 			result = reader.GetFieldNames(IndexReader.FieldOption.INDEXED);
 			Assert.IsTrue(result != null);
 			Assert.IsTrue(result.Count == DocHelper.indexed.Count);
 			for (System.Collections.IEnumerator iter = result.GetEnumerator(); iter.MoveNext(); )
 			{
-                System.String s = (System.String)iter.Current;
-                Assert.IsTrue(DocHelper.indexed.Contains(s) == true || s.Equals(""));
+				System.String s = (System.String) iter.Current;
+				Assert.IsTrue(DocHelper.indexed.Contains(s) == true || s.Equals(""));
 			}
 			
 			result = reader.GetFieldNames(IndexReader.FieldOption.UNINDEXED);
 			Assert.IsTrue(result != null);
 			Assert.IsTrue(result.Count == DocHelper.unindexed.Count);
-
 			//Get all indexed fields that are storing term vectors
 			result = reader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
 			Assert.IsTrue(result != null);
@@ -207,31 +178,38 @@
 			// test omit norms
 			for (int i = 0; i < DocHelper.fields.Length; i++)
 			{
-				Lucene.Net.Documents.Fieldable f = DocHelper.fields[i];
+				Fieldable f = DocHelper.fields[i];
 				if (f.IsIndexed())
 				{
 					Assert.AreEqual(reader.HasNorms(f.Name()), !f.GetOmitNorms());
 					Assert.AreEqual(reader.HasNorms(f.Name()), !DocHelper.noNorms.Contains(f.Name()));
 					if (!reader.HasNorms(f.Name()))
 					{
-						// test for fake norms of 1.0
+						// test for fake norms of 1.0 or null depending on the flag
 						byte[] norms = reader.Norms(f.Name());
-						Assert.AreEqual(norms.Length, reader.MaxDoc());
-						for (int j = 0; j < reader.MaxDoc(); j++)
+						byte norm1 = DefaultSimilarity.EncodeNorm(1.0f);
+						if (reader.GetDisableFakeNorms())
+							Assert.IsNull(norms);
+						else
 						{
-							Assert.AreEqual(norms[j], DefaultSimilarity.EncodeNorm(1.0f));
+							Assert.AreEqual(norms.Length, reader.MaxDoc());
+							for (int j = 0; j < reader.MaxDoc(); j++)
+							{
+								Assert.AreEqual(norms[j], norm1);
+							}
 						}
 						norms = new byte[reader.MaxDoc()];
 						reader.Norms(f.Name(), norms, 0);
 						for (int j = 0; j < reader.MaxDoc(); j++)
 						{
-							Assert.AreEqual(norms[j], DefaultSimilarity.EncodeNorm(1.0f));
+							Assert.AreEqual(norms[j], norm1);
 						}
 					}
 				}
 			}
 		}
 		
+		[Test]
 		public virtual void  TestTermVectors()
 		{
 			TermFreqVector result = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
@@ -251,23 +229,5 @@
 			Assert.IsTrue(results != null);
 			Assert.IsTrue(results.Length == 4, "We do not have 4 term freq vectors, we have: " + results.Length);
 		}
-		
-		[Test]
-		public virtual void  TestIndexDivisor()
-		{
-			dir = new MockRAMDirectory();
-			testDoc = new Document();
-			DocHelper.SetupDoc(testDoc);
-			SegmentInfo si = DocHelper.WriteDoc(dir, testDoc);
-			
-			reader = SegmentReader.Get(si);
-			reader.SetTermInfosIndexDivisor(3);
-			TestDocument();
-			TestDelete();
-			TestGetFieldNameVariations();
-			TestNorms();
-			TestTerms();
-			TestTermVectors();
-		}
 	}
 }
\ No newline at end of file

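The TestNorms hunk above distinguishes two behaviors for fields that omit norms: a synthesized array of fake 1.0 norms, or null once fake norms are disabled. A short sketch of the byte encoding those assertions compare against, assuming the DecodeNorm counterpart of the EncodeNorm call used in the test:

    using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;

    public class NormEncodingSketch
    {
        public static void Main(System.String[] args)
        {
            // Norms are stored as one byte per document; 1.0f is the value a
            // reader fakes for norm-less fields (or it returns null instead,
            // when fake norms are disabled).
            byte b = DefaultSimilarity.EncodeNorm(1.0f);
            float back = DefaultSimilarity.DecodeNorm(b);   // round-trips to roughly 1.0f
            System.Console.Out.WriteLine("encoded=" + b + " decoded=" + back);
        }
    }
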
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentTermDocs.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,41 +19,32 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Similarity = Lucene.Net.Search.Similarity;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestSegmentTermDocs : LuceneTestCase
+    [TestFixture]
+	public class TestSegmentTermDocs:LuceneTestCase
 	{
-		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
+		private Document testDoc = new Document();
 		private Directory dir = new RAMDirectory();
 		private SegmentInfo info;
 		
-		// got the idea for this from George's note in TestSegmentReader
-		// it seems that JUnit creates a new instance of the class for each test invocation
-		// while NUnit does not (seems like a flaw in JUnit, to be honest)
-		// forcing the re-init of the variables for each run solves the problem
-		private void SetUpInternal()
-		{
-			dir = new RAMDirectory();
-			testDoc = new Lucene.Net.Documents.Document();
-			info = null;
+		public TestSegmentTermDocs(System.String s):base(s)
+		{
 		}
-
+		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
-			SetUpInternal();
 			DocHelper.SetupDoc(testDoc);
 			info = DocHelper.WriteDoc(dir, testDoc);
 		}
@@ -73,9 +64,9 @@
 		public virtual void  TestTermDocs(int indexDivisor)
 		{
 			//After adding the document, we should be able to read it back in
-			SegmentReader reader = SegmentReader.Get(info);
-			reader.SetTermInfosIndexDivisor(indexDivisor);
+			SegmentReader reader = SegmentReader.Get(true, info, indexDivisor);
 			Assert.IsTrue(reader != null);
+			Assert.AreEqual(indexDivisor, reader.GetTermInfosIndexDivisor());
 			SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
 			Assert.IsTrue(segTermDocs != null);
 			segTermDocs.Seek(new Term(DocHelper.TEXT_FIELD_2_KEY, "field"));
@@ -92,15 +83,14 @@
 		[Test]
 		public virtual void  TestBadSeek()
 		{
-			TestBadSeek(1);
+			testBadSeek(1);
 		}
 		
-		public virtual void  TestBadSeek(int indexDivisor)
+		public virtual void  testBadSeek(int indexDivisor)
 		{
 			{
 				//After adding the document, we should be able to read it back in
-				SegmentReader reader = SegmentReader.Get(info);
-				reader.SetTermInfosIndexDivisor(indexDivisor);
+				SegmentReader reader = SegmentReader.Get(true, info, indexDivisor);
 				Assert.IsTrue(reader != null);
 				SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
 				Assert.IsTrue(segTermDocs != null);
@@ -110,8 +100,7 @@
 			}
 			{
 				//After adding the document, we should be able to read it back in
-				SegmentReader reader = SegmentReader.Get(info);
-				reader.SetTermInfosIndexDivisor(indexDivisor);
+				SegmentReader reader = SegmentReader.Get(true, info, indexDivisor);
 				Assert.IsTrue(reader != null);
 				SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
 				Assert.IsTrue(segTermDocs != null);
@@ -124,10 +113,10 @@
 		[Test]
 		public virtual void  TestSkipTo()
 		{
-			TestSkipTo(1);
+			testSkipTo(1);
 		}
 		
-		public virtual void  TestSkipTo(int indexDivisor)
+		public virtual void  testSkipTo(int indexDivisor)
 		{
 			Directory dir = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
@@ -148,9 +137,7 @@
 			writer.Optimize();
 			writer.Close();
 			
-			IndexReader reader = IndexReader.Open(dir);
-			reader.SetTermInfosIndexDivisor(indexDivisor);
-			Assert.AreEqual(indexDivisor, reader.GetTermInfosIndexDivisor());
+			IndexReader reader = IndexReader.Open(dir, null, true, indexDivisor);
 			
 			TermDocs tdocs = reader.TermDocs();
 			
@@ -263,33 +250,13 @@
 			DocHelper.SetupDoc(testDoc);
 			DocHelper.WriteDoc(dir, testDoc);
 			TestTermDocs(2);
-			TestBadSeek(2);
-			TestSkipTo(2);
-		}
-		
-		[Test]
-		public virtual void  TestIndexDivisorAfterLoad()
-		{
-			dir = new MockRAMDirectory();
-			testDoc = new Document();
-			DocHelper.SetupDoc(testDoc);
-			SegmentInfo si = DocHelper.WriteDoc(dir, testDoc);
-			SegmentReader reader = SegmentReader.Get(si);
-			Assert.AreEqual(1, reader.DocFreq(new Term("keyField", "Keyword")));
-			try
-			{
-				reader.SetTermInfosIndexDivisor(2);
-				Assert.Fail("did not hit IllegalStateException exception");
-			}
-			catch (System.SystemException)
-			{
-				// expected
-			}
+			testBadSeek(2);
+			testSkipTo(2);
 		}
 		
 		private void  AddDoc(IndexWriter writer, System.String value_Renamed)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 		}

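The TestSegmentTermDocs hunks above move the term-infos index divisor from a post-open setter to an argument of the open call. A hedged usage sketch of the reader-side API as this revision uses it (dir is assumed to hold an existing index; only every N-th term is kept in the in-memory term index, trading lookup speed for memory):

    public class IndexDivisorSketch
    {
        public static void Demo(Lucene.Net.Store.Directory dir)
        {
            // Read-only open, no custom deletion policy, keep every 2nd term
            // in the in-memory term index.
            Lucene.Net.Index.IndexReader reader = Lucene.Net.Index.IndexReader.Open(dir, null, true, 2);
            try
            {
                // Term lookups still work, just through a coarser index.
                int df = reader.DocFreq(new Lucene.Net.Index.Term("content", "aaa"));
                System.Console.Out.WriteLine("docFreq=" + df);
            }
            finally
            {
                reader.Close();
            }
        }
    }
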
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentTermEnum.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,21 +19,20 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
-	/// <author>  goller
-	/// </author>
-	[TestFixture]
-	public class TestSegmentTermEnum : LuceneTestCase
+	
+    [TestFixture]
+	public class TestSegmentTermEnum:LuceneTestCase
 	{
 		internal Directory dir = new RAMDirectory();
 		
@@ -44,7 +43,7 @@
 			
 			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			// add 100 documents with term : aaa
+			// ADD 100 documents with term : aaa
 			// add 100 documents with terms: aaa bbb
 			// Therefore, term 'aaa' has document frequency of 200 and term 'bbb' 100
 			for (int i = 0; i < 100; i++)
@@ -59,7 +58,7 @@
 			VerifyDocFreq();
 			
 			// merge segments by optimizing the index
-            writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 			writer.Optimize();
 			writer.Close();
 			
@@ -71,10 +70,10 @@
 		public virtual void  TestPrevTermAtEnd()
 		{
 			Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			AddDoc(writer, "aaa bbb");
 			writer.Close();
-			IndexReader reader = IndexReader.Open(dir);
+			SegmentReader reader = SegmentReader.GetOnlySegmentReader(dir);
 			SegmentTermEnum termEnum = (SegmentTermEnum) reader.Terms();
 			Assert.IsTrue(termEnum.Next());
 			Assert.AreEqual("aaa", termEnum.Term().Text());
@@ -122,7 +121,7 @@
 		
 		private void  AddDoc(IndexWriter writer, System.String value_Renamed)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 		}

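TestPrevTermAtEnd above now takes a SegmentTermEnum straight from a single-segment reader. For reference, a minimal sketch of the plain TermEnum walk the surrounding assertions rely on (reader is assumed open; terms arrive in sorted order, each with its document frequency, e.g. 200 for "aaa" and 100 for "bbb" in this test's setup):

    public class TermWalkSketch
    {
        public static void Dump(Lucene.Net.Index.IndexReader reader)
        {
            Lucene.Net.Index.TermEnum termEnum = reader.Terms();
            try
            {
                while (termEnum.Next())   // advances through terms in sorted order
                {
                    Lucene.Net.Index.Term t = termEnum.Term();
                    System.Console.Out.WriteLine(t.Field() + ":" + t.Text() + " df=" + termEnum.DocFreq());
                }
            }
            finally
            {
                termEnum.Close();
            }
        }
    }
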
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestStressIndexing.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,23 +19,26 @@
 
 using NUnit.Framework;
 
-using Lucene.Net.Util;
-using Lucene.Net.Store;
-using Lucene.Net.Documents;
 using Lucene.Net.Analysis;
-using Lucene.Net.Search;
+using Lucene.Net.Documents;
 using Lucene.Net.QueryParsers;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using Lucene.Net.Search;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestStressIndexing : LuceneTestCase
+    [TestFixture]
+	public class TestStressIndexing:LuceneTestCase
 	{
 		private static readonly Analyzer ANALYZER = new SimpleAnalyzer();
-		private static readonly System.Random RANDOM = new System.Random();
+		private System.Random RANDOM;
 		
-		abstract public class TimedThread : SupportClass.ThreadClass
+		abstract public class TimedThread:SupportClass.ThreadClass
 		{
 			internal bool failed;
 			internal int count;
@@ -51,13 +54,13 @@
 			
 			override public void  Run()
 			{
-				long stopTime = (System.DateTime.Now.Ticks - 621355968000000000) / 10000 + 1000 * RUN_TIME_SEC;
+				long stopTime = System.DateTime.Now.Millisecond + 1000 * RUN_TIME_SEC;
 				
 				count = 0;
 				
 				try
 				{
-					while ((System.DateTime.Now.Ticks - 621355968000000000) / 10000 < stopTime && !AnyErrors())
+					while (System.DateTime.Now.Millisecond < stopTime && !AnyErrors())
 					{
 						DoWork();
 						count++;
@@ -65,8 +68,8 @@
 				}
 				catch (System.Exception e)
 				{
-                    System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread + ": exc");
-                    System.Console.Out.WriteLine(e.StackTrace);
+					System.Console.Out.WriteLine(SupportClass.ThreadClass.Current() + ": exc");
+					System.Console.Out.WriteLine(e.StackTrace);
 					failed = true;
 				}
 			}
@@ -80,14 +83,28 @@
 			}
 		}
 		
-		private class IndexerThread : TimedThread
+		private class IndexerThread:TimedThread
 		{
+			private void  InitBlock(TestStressIndexing enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestStressIndexing enclosingInstance;
+			public TestStressIndexing Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
 			internal IndexWriter writer;
-			//new public int count;
+			new public int count;
 			internal int nextID;
 			
-			public IndexerThread(IndexWriter writer, TimedThread[] threads):base(threads)
+			public IndexerThread(TestStressIndexing enclosingInstance, IndexWriter writer, TimedThread[] threads):base(threads)
 			{
+				InitBlock(enclosingInstance);
 				this.writer = writer;
 			}
 			
@@ -97,7 +114,7 @@
 				for (int j = 0; j < 10; j++)
 				{
 					Document d = new Document();
-					int n = Lucene.Net.Index.TestStressIndexing.RANDOM.Next();
+					int n = Enclosing_Instance.RANDOM.Next();
 					d.Add(new Field("id", System.Convert.ToString(nextID++), Field.Store.YES, Field.Index.NOT_ANALYZED));
 					d.Add(new Field("contents", English.IntToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
 					writer.AddDocument(d);
@@ -113,7 +130,7 @@
 			}
 		}
 		
-		private class SearcherThread : TimedThread
+		private class SearcherThread:TimedThread
 		{
 			private Directory directory;
 			
@@ -139,42 +156,40 @@
 			IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
 			
 			modifier.SetMaxBufferedDocs(10);
-
+			
 			TimedThread[] threads = new TimedThread[4];
-            int numThread = 0;
+			int numThread = 0;
 			
 			if (mergeScheduler != null)
 				modifier.SetMergeScheduler(mergeScheduler);
 			
 			// One modifier that writes 10 docs then removes 5, over
 			// and over:
-			IndexerThread indexerThread = new IndexerThread(modifier, threads);
+			IndexerThread indexerThread = new IndexerThread(this, modifier, threads);
 			threads[numThread++] = indexerThread;
 			indexerThread.Start();
-
-            IndexerThread indexerThread2 = new IndexerThread(modifier, threads);
-            threads[numThread++] = indexerThread2;
-            indexerThread2.Start();
-
-            // Two searchers that constantly just re-instantiate the
-            // searcher:
-            SearcherThread searcherThread1 = new SearcherThread(directory, threads);
-            threads[numThread++] = searcherThread1;
-            searcherThread1.Start();
-
-            SearcherThread searcherThread2 = new SearcherThread(directory, threads);
-            threads[numThread++] = searcherThread2;
-            searcherThread2.Start();
-
-            for (int i = 0; i < threads.Length; i++)
-                //threads[i].Join();
-                if (threads[i] != null) threads[i].Join();
-
+			
+			IndexerThread indexerThread2 = new IndexerThread(this, modifier, threads);
+			threads[numThread++] = indexerThread2;
+			indexerThread2.Start();
+			
+			// Two searchers that constantly just re-instantiate the
+			// searcher:
+			SearcherThread searcherThread1 = new SearcherThread(directory, threads);
+			threads[numThread++] = searcherThread1;
+			searcherThread1.Start();
+			
+			SearcherThread searcherThread2 = new SearcherThread(directory, threads);
+			threads[numThread++] = searcherThread2;
+			searcherThread2.Start();
+			
+			for (int i = 0; i < numThread; i++)
+				threads[i].Join();
+			
 			modifier.Close();
-
-            for (int i = 0; i < threads.Length; i++)
-                //Assert.IsTrue(!((TimedThread)threads[i]).failed);
-                if (threads[i] != null) Assert.IsTrue(!((TimedThread)threads[i]).failed);
+			
+			for (int i = 0; i < numThread; i++)
+				Assert.IsTrue(!((TimedThread) threads[i]).failed);
 			
 			//System.out.println("    Writer: " + indexerThread.count + " iterations");
 			//System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
@@ -188,44 +203,40 @@
 		[Test]
 		public virtual void  TestStressIndexAndSearching()
 		{
-            ////for (int i = 0; i < 10; i++)
-            ////{
-            //// RAMDir
-            //Directory directory = new MockRAMDirectory();
-            //RunStressTest(directory, true, null);
-            //directory.Close();
-Directory directory;
-
-            // FSDir
-            System.String tempDir = System.IO.Path.GetTempPath();
-            System.IO.FileInfo dirPath = new System.IO.FileInfo(tempDir + "\\" + "lucene.test.stress");
-            directory = FSDirectory.GetDirectory(dirPath);
-            RunStressTest(directory, true, null);
-            directory.Close();
-
-//System.Console.WriteLine("Index Path: {0}", dirPath);
-
-            //// With ConcurrentMergeScheduler, in RAMDir
-            //directory = new MockRAMDirectory();
-            //RunStressTest(directory, true, new ConcurrentMergeScheduler());
-            //directory.Close();
-
-            // With ConcurrentMergeScheduler, in FSDir
-            directory = FSDirectory.GetDirectory(dirPath);
-            RunStressTest(directory, true, new ConcurrentMergeScheduler());
-            directory.Close();
-
-            //// With ConcurrentMergeScheduler and autoCommit=false, in RAMDir
-            //directory = new MockRAMDirectory();
-            //RunStressTest(directory, false, new ConcurrentMergeScheduler());
-            //directory.Close();
-
-            // With ConcurrentMergeScheduler and autoCommit=false, in FSDir
-            directory = FSDirectory.GetDirectory(dirPath);
-            RunStressTest(directory, false, new ConcurrentMergeScheduler());
-            directory.Close();
-
-            _TestUtil.RmDir(dirPath);
+			RANDOM = NewRandom();
+			
+			// RAMDir
+			Directory directory = new MockRAMDirectory();
+			RunStressTest(directory, true, null);
+			directory.Close();
+			
+			// FSDir
+			System.IO.FileInfo dirPath = _TestUtil.GetTempDir("lucene.test.stress");
+			directory = FSDirectory.Open(dirPath);
+			RunStressTest(directory, true, null);
+			directory.Close();
+			
+			// With ConcurrentMergeScheduler, in RAMDir
+			directory = new MockRAMDirectory();
+			RunStressTest(directory, true, new ConcurrentMergeScheduler());
+			directory.Close();
+			
+			// With ConcurrentMergeScheduler, in FSDir
+			directory = FSDirectory.Open(dirPath);
+			RunStressTest(directory, true, new ConcurrentMergeScheduler());
+			directory.Close();
+			
+			// With ConcurrentMergeScheduler and autoCommit=false, in RAMDir
+			directory = new MockRAMDirectory();
+			RunStressTest(directory, false, new ConcurrentMergeScheduler());
+			directory.Close();
+			
+			// With ConcurrentMergeScheduler and autoCommit=false, in FSDir
+			directory = FSDirectory.Open(dirPath);
+			RunStressTest(directory, false, new ConcurrentMergeScheduler());
+			directory.Close();
+			
+			_TestUtil.RmDir(dirPath);
 		}
 	}
 }
\ No newline at end of file

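One caveat in the TimedThread hunk above: System.DateTime.Now.Millisecond is the 0-999 millisecond component of the current second, not elapsed time, so the stop condition it feeds is not a real deadline. A sketch of an equivalent deadline check on an actual millisecond clock (assumptions: DateTime ticks divided by TimeSpan.TicksPerMillisecond serve as the clock, and RUN_TIME_SEC stands in for the test's own constant):

    public abstract class TimedLoopSketch
    {
        private const int RUN_TIME_SEC = 6;   // placeholder; the test defines its own value

        public void Run()
        {
            long nowMs = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
            long stopTime = nowMs + 1000 * RUN_TIME_SEC;   // deadline in wall-clock milliseconds
            while (System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond < stopTime)
            {
                DoWork();   // one unit of indexing/searching work per iteration
            }
        }

        public abstract void DoWork();
    }
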
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestStressIndexing2.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing2.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestStressIndexing2.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -17,29 +17,27 @@
 
 using System;
 
+using NUnit.Framework;
+
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using Lucene.Net.Store;
-using Lucene.Net.Analysis;
+using StringHelper = Lucene.Net.Util.StringHelper;
+using TermQuery = Lucene.Net.Search.TermQuery;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using _TestUtil = Lucene.Net.Util._TestUtil;
-using TermQuery = Lucene.Net.Search.TermQuery;
-
-using NUnit.Framework;
 
 namespace Lucene.Net.Index
 {
-	[TestFixture]
-	public class TestStressIndexing2 : LuceneTestCase
+	
+    [TestFixture]
+	public class TestStressIndexing2:LuceneTestCase
 	{
 		internal class AnonymousClassComparator : System.Collections.IComparer
 		{
-            Fieldable f1, f2;
 			public virtual int Compare(System.Object o1, System.Object o2)
 			{
-                if (o1 == o2) return 0;
-                f1 = (Fieldable)o1;
-                f2 = (Fieldable)o2;
-                return String.CompareOrdinal(f1.Name() + f1.StringValue(), f2.Name() + f2.StringValue());
+				return String.CompareOrdinal(((Fieldable) o1).Name(), ((Fieldable) o2).Name());
 			}
 		}
 		internal static int maxFields = 4;
@@ -48,32 +46,71 @@
 		internal static bool autoCommit = false;
 		internal static int mergeFactor = 3;
 		internal static int maxBufferedDocs = 3;
-		internal static int seed = 0;
+		new internal static int seed = 0;
+		
+		internal System.Random r;
+		
+		public class MockIndexWriter:IndexWriter
+		{
+			private void  InitBlock(TestStressIndexing2 enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestStressIndexing2 enclosingInstance;
+			public TestStressIndexing2 Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			public MockIndexWriter(TestStressIndexing2 enclosingInstance, Directory dir, bool autoCommit, Analyzer a, bool create):base(dir, autoCommit, a, create)
+			{
+				InitBlock(enclosingInstance);
+			}
+			
+			public /*internal*/ override bool TestPoint(System.String name)
+			{
+				//      if (name.equals("startCommit")) {
+				if (Enclosing_Instance.r.Next(4) == 2)
+					System.Threading.Thread.Sleep(0);
+				return true;
+			}
+		}
+		
+		[Test]
+		public virtual void  TestRandomIWReader()
+		{
+			this.r = NewRandom();
+			Directory dir = new MockRAMDirectory();
+			
+			// TODO: verify equals using IW.getReader
+			DocsAndWriter dw = IndexRandomIWReader(10, 100, 100, dir);
+			IndexReader r = dw.writer.GetReader();
+			dw.writer.Commit();
+			VerifyEquals(r, dir, "id");
+			r.Close();
+			dw.writer.Close();
+			dir.Close();
+		}
 		
-		internal static System.Random r = new System.Random((System.Int32) 0);
-
-        public class MockIndexWriter : IndexWriter
-        {
-            public MockIndexWriter(Directory dir, bool autoCommit, Analyzer a, bool create)
-                : base(dir, autoCommit, a, create)
-            {
-            }
-
-            protected override bool TestPoint(string name)
-            {
-                if (TestStressIndexing2.r.Next(4) == 2)
-                    System.Threading.Thread.Sleep(1);
-                return true;
-            }
-        }
-
 		[Test]
 		public virtual void  TestRandom()
 		{
+			r = NewRandom();
 			Directory dir1 = new MockRAMDirectory();
+			// dir1 = FSDirectory.open("foofoofoo");
 			Directory dir2 = new MockRAMDirectory();
-            System.Collections.IDictionary docs = IndexRandom(10, 100, 100, dir1);
+			// mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
+			System.Collections.IDictionary docs = IndexRandom(10, 100, 100, dir1);
 			IndexSerial(docs, dir2);
+			
+			// verifying verify
+			// verifyEquals(dir1, dir1, "id");
+			// verifyEquals(dir2, dir2, "id");
+			
 			VerifyEquals(dir1, dir2, "id");
 		}
 		
@@ -81,8 +118,9 @@
 		public virtual void  TestMultiConfig()
 		{
 			// test lots of smaller different params together
-            for (int i = 0; i < 100; i++)
-            {
+			r = NewRandom();
+			for (int i = 0; i < 100; i++)
+			{
 				// increase iterations for better testing
 				sameFieldOrder = r.NextDouble() > 0.5;
 				autoCommit = r.NextDouble() > 0.5;
@@ -93,10 +131,9 @@
 				int nThreads = r.Next(5) + 1;
 				int iter = r.Next(10) + 1;
 				int range = r.Next(20) + 1;
-				
 				Directory dir1 = new MockRAMDirectory();
 				Directory dir2 = new MockRAMDirectory();
-                System.Collections.IDictionary docs = IndexRandom(nThreads, iter, range, dir1);
+				System.Collections.IDictionary docs = IndexRandom(nThreads, iter, range, dir1);
 				IndexSerial(docs, dir2);
 				VerifyEquals(dir1, dir2, "id");
 			}
@@ -111,57 +148,120 @@
 		// indexing threads to test that IndexWriter does correctly synchronize
 		// everything.
 		
+		public class DocsAndWriter
+		{
+			internal System.Collections.IDictionary docs;
+			internal IndexWriter writer;
+		}
+		
+		public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
+		{
+			System.Collections.Hashtable docs = new System.Collections.Hashtable();
+			IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
+			w.SetUseCompoundFile(false);
+			
+			/***
+			w.setMaxMergeDocs(Integer.MAX_VALUE);
+			w.setMaxFieldLength(10000);
+			w.setRAMBufferSizeMB(1);
+			w.setMergeFactor(10);
+			***/
+			
+			// force many merges
+			w.SetMergeFactor(mergeFactor);
+			w.SetRAMBufferSizeMB(.1);
+			w.SetMaxBufferedDocs(maxBufferedDocs);
+			
+			threads = new IndexingThread[nThreads];
+			for (int i = 0; i < threads.Length; i++)
+			{
+				IndexingThread th = new IndexingThread();
+				th.w = w;
+				th.base_Renamed = 1000000 * i;
+				th.range = range;
+				th.iterations = iterations;
+				threads[i] = th;
+			}
+			
+			for (int i = 0; i < threads.Length; i++)
+			{
+				threads[i].Start();
+			}
+			for (int i = 0; i < threads.Length; i++)
+			{
+				threads[i].Join();
+			}
+			
+			// w.optimize();
+			//w.close();    
+			
+			for (int i = 0; i < threads.Length; i++)
+			{
+				IndexingThread th = threads[i];
+				lock (th)
+				{
+					SupportClass.CollectionsHelper.AddAllIfNotContains(docs, th.docs);
+				}
+			}
+			
+			_TestUtil.CheckIndex(dir);
+			DocsAndWriter dw = new DocsAndWriter();
+			dw.docs = docs;
+			dw.writer = w;
+			return dw;
+		}
+		
 		public virtual System.Collections.IDictionary IndexRandom(int nThreads, int iterations, int range, Directory dir)
 		{
-            System.Collections.Hashtable docs = new System.Collections.Hashtable();
-            for (int iter = 0; iter < 3; iter++)
-            {
-                IndexWriter w = new MockIndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-                w.SetUseCompoundFile(false);
-
-                // force many merges
-                w.SetMergeFactor(mergeFactor);
-                w.SetRAMBufferSizeMB(.1);
-                w.SetMaxBufferedDocs(maxBufferedDocs);
-
-                threads = new IndexingThread[nThreads];
-                for (int i = 0; i < threads.Length; i++)
-                {
-                    IndexingThread th = new IndexingThread();
-                    th.w = w;
-                    th.base_Renamed = 1000000 * i;
-                    th.range = range;
-                    th.iterations = iterations;
-                    threads[i] = th;
-                }
-
-                for (int i = 0; i < threads.Length; i++)
-                {
-                    threads[i].Start();
-                }
-                for (int i = 0; i < threads.Length; i++)
-                {
-                    threads[i].Join();
-                }
-
-                // w.optimize();
-                w.Close();
-
-                for (int i = 0; i < threads.Length; i++)
-                {
-                    IndexingThread th = threads[i];
-                    lock (th)
-                    {
+			System.Collections.IDictionary docs = new System.Collections.Hashtable();
+			for (int iter = 0; iter < 3; iter++)
+			{
+				IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
+				w.SetUseCompoundFile(false);
+				
+				// force many merges
+				w.SetMergeFactor(mergeFactor);
+				w.SetRAMBufferSizeMB(.1);
+				w.SetMaxBufferedDocs(maxBufferedDocs);
+				
+				threads = new IndexingThread[nThreads];
+				for (int i = 0; i < threads.Length; i++)
+				{
+					IndexingThread th = new IndexingThread();
+					th.w = w;
+					th.base_Renamed = 1000000 * i;
+					th.range = range;
+					th.iterations = iterations;
+					threads[i] = th;
+				}
+				
+				for (int i = 0; i < threads.Length; i++)
+				{
+					threads[i].Start();
+				}
+				for (int i = 0; i < threads.Length; i++)
+				{
+					threads[i].Join();
+				}
+				
+				// w.optimize();
+				w.Close();
+				
+				for (int i = 0; i < threads.Length; i++)
+				{
+					IndexingThread th = threads[i];
+					lock (th)
+					{
                         System.Collections.IEnumerator e = th.docs.Keys.GetEnumerator();
                         while (e.MoveNext())
                         {
                             docs[e.Current] = th.docs[e.Current];
                         }
-                    }
-                }
-            }
-
-            _TestUtil.CheckIndex(dir);
+					}
+				}
+			}
+			
+			_TestUtil.CheckIndex(dir);
 			
 			return docs;
 		}
@@ -178,18 +278,9 @@
 				Document d = (Document) iter.Current;
 				System.Collections.ArrayList fields = new System.Collections.ArrayList();
 				fields.AddRange(d.GetFields());
-                
-                // nonono - can't do this (below)
-                //
-                // if multiple fields w/ same name, each instance must be
-                // added in the same order as original doc, as the fields
-                // are effectively concatenated
-                //
-                // term position/offset information must be maintained 
-                
-                // put fields in same order each time
-                //fields.Sort(fieldNameComparator);
-
+				// put fields in same order each time
+				SupportClass.CollectionsHelper.Sort(fields, fieldNameComparator);
+				
 				Document d1 = new Document();
 				d1.SetBoost(d.GetBoost());
 				for (int i = 0; i < fields.Count; i++)
@@ -203,6 +294,13 @@
 			w.Close();
 		}
 		
+		public static void  VerifyEquals(IndexReader r1, Directory dir2, System.String idField)
+		{
+			IndexReader r2 = IndexReader.Open(dir2);
+			VerifyEquals(r1, r2, idField);
+			r2.Close();
+		}
+		
 		public static void  VerifyEquals(Directory dir1, Directory dir2, System.String idField)
 		{
 			IndexReader r1 = IndexReader.Open(dir1);
@@ -224,7 +322,7 @@
 			TermDocs termDocs2 = r2.TermDocs();
 			
 			// create mapping from id2 space to id2 based on idField
-			idField = String.Intern(idField);
+			idField = StringHelper.Intern(idField);
 			TermEnum termEnum = r1.Terms(new Term(idField, ""));
 			do 
 			{
@@ -233,14 +331,14 @@
 					break;
 				
 				termDocs1.Seek(termEnum);
-                if (!termDocs1.Next())
-                {
-                    // This doc is deleted and wasn't replaced
-                    termDocs2.Seek(termEnum);
-                    Assert.IsFalse(termDocs2.Next());
-                    continue;
-                }
-
+				if (!termDocs1.Next())
+				{
+					// This doc is deleted and wasn't replaced
+					termDocs2.Seek(termEnum);
+					Assert.IsFalse(termDocs2.Next());
+					continue;
+				}
+				
 				int id1 = termDocs1.Doc();
 				Assert.IsFalse(termDocs1.Next());
 				
@@ -252,7 +350,17 @@
 				r2r1[id2] = id1;
 				
 				// verify stored fields are equivalent
-				VerifyEquals(r1.Document(id1), r2.Document(id2));
+				try
+				{
+					VerifyEquals(r1.Document(id1), r2.Document(id2));
+				}
+				catch (System.Exception t)
+				{
+					System.Console.Out.WriteLine("FAILED id=" + term + " id1=" + id1 + " id2=" + id2 + " term=" + term);
+					System.Console.Out.WriteLine("  d1=" + r1.Document(id1));
+					System.Console.Out.WriteLine("  d2=" + r2.Document(id2));
+					throw t;
+				}
 				
 				try
 				{
@@ -366,30 +474,19 @@
 		
 		public static void  VerifyEquals(Document d1, Document d2)
 		{
-			System.Collections.ArrayList ff1 = new System.Collections.ArrayList(d1.GetFields());
-			System.Collections.ArrayList ff2 = new System.Collections.ArrayList(d2.GetFields());
-
-			ff1.Sort(fieldNameComparator);
-			ff2.Sort(fieldNameComparator);
+			System.Collections.IList ff1 = d1.GetFields();
+			System.Collections.IList ff2 = d2.GetFields();
+			
+			SupportClass.CollectionsHelper.Sort(ff1, fieldNameComparator);
+			SupportClass.CollectionsHelper.Sort(ff2, fieldNameComparator);
 			
 			if (ff1.Count != ff2.Count)
 			{
-                // print out whole doc on error
-                System.Console.Write("Doc 1:");
-                for (int j = 0; j < ff1.Count; j++)
-                {
-                    Fieldable field = (Fieldable)ff1[j];
-                    System.Console.Write(" {0}={1};", field.Name(), field.StringValue());
-                }
-                System.Console.WriteLine();
-                System.Console.Write("Doc 2:");
-                for (int j = 0; j < ff2.Count; j++)
-                {
-                    Fieldable field = (Fieldable)ff2[j];
-                    System.Console.Write(" {0}={1};", field.Name(), field.StringValue());
-                }
-                System.Console.WriteLine(); Assert.AreEqual(ff1.Count, ff2.Count);
-			}			
+				System.Console.Out.WriteLine(SupportClass.CollectionsHelper.CollectionToString(ff1));
+				System.Console.Out.WriteLine(SupportClass.CollectionsHelper.CollectionToString(ff2));
+				Assert.AreEqual(ff1.Count, ff2.Count);
+			}
+			
 			
 			for (int i = 0; i < ff1.Count; i++)
 			{
@@ -407,21 +504,9 @@
 					if (!s1.Equals(s2))
 					{
 						// print out whole doc on error
-                        System.Console.Write("Doc 1:");
-                        for (int j = 0; j < ff1.Count; j++)
-                        {
-                            Fieldable field = (Fieldable)ff1[j];
-                            System.Console.Write(" {0}={1};", field.Name(), field.StringValue());
-                        }
-                        System.Console.WriteLine();
-                        System.Console.Write("Doc 2:");
-                        for (int j = 0; j < ff2.Count; j++)
-                        {
-                            Fieldable field = (Fieldable)ff2[j];
-                            System.Console.Write(" {0}={1};", field.Name(), field.StringValue());
-                        }
-                        System.Console.WriteLine();
-                        Assert.AreEqual(s1, s2);
+						System.Console.Out.WriteLine(SupportClass.CollectionsHelper.CollectionToString(ff1));
+						System.Console.Out.WriteLine(SupportClass.CollectionsHelper.CollectionToString(ff2));
+						Assert.AreEqual(s1, s2);
 					}
 				}
 			}
@@ -441,9 +526,11 @@
 			{
 				TermFreqVector v1 = d1[i];
 				TermFreqVector v2 = d2[i];
-                if (v1 == null || v2 == null)
-                    System.Console.Out.WriteLine("v1=" + v1 + " v2=" + v2 + " i=" + i + " of " + d1.Length);
-                Assert.AreEqual(v1.Size(), v2.Size());
+				if (v1 == null || v2 == null)
+				{
+					System.Console.Out.WriteLine("v1=" + v1 + " v2=" + v2 + " i=" + i + " of " + d1.Length);
+				}
+				Assert.AreEqual(v1.Size(), v2.Size());
 				int numTerms = v1.Size();
 				System.String[] terms1 = v1.GetTerms();
 				System.String[] terms2 = v2.GetTerms();
@@ -473,7 +560,7 @@
 							Assert.IsTrue(offsets2 != null);
 						for (int k = 0; k < pos1.Length; k++)
 						{
-                            Assert.AreEqual(pos1[k], pos2[k]);
+							Assert.AreEqual(pos1[k], pos2[k]);
 							if (offsets1 != null)
 							{
 								Assert.AreEqual(offsets1[k].GetStartOffset(), offsets2[k].GetStartOffset());
@@ -485,7 +572,7 @@
 			}
 		}
 		
-		internal class IndexingThread : SupportClass.ThreadClass
+		internal class IndexingThread:SupportClass.ThreadClass
 		{
 			internal IndexWriter w;
 			internal int base_Renamed;
@@ -498,65 +585,65 @@
 			{
 				return r.Next(lim);
 			}
-
-            // start is inclusive and end is exclusive
-            public int NextInt(int start, int end)
-            {
-                return start + r.Next(end - start);
-            }
-
-            internal char[] buffer = new char[100];
-
-            private int AddUTF8Token(int start)
-            {
-                int end = start + NextInt(20);
-                if (buffer.Length < 1 + end)
-                {
-                    char[] newBuffer = new char[(int)((1 + end) * 1.25)];
-                    System.Array.Copy(buffer, 0, newBuffer, 0, buffer.Length);
-                    buffer = newBuffer;
-                }
-
-                for (int i = start; i < end; i++)
-                {
-                    int t = NextInt(6);
-                    if (0 == t && i < end - 1)
-                    {
-                        // make a surrogate pair
-                        // high surrogate
-                        buffer[i++] = (char)NextInt(0xD800, 0xDC00);
-                        // low surrogate
-                        buffer[i] = (char)NextInt(0xDC00, 0xE000);
-                    }
-                    else if (t <= 1)
-                        buffer[i] = (char)NextInt(0x80);
-                    else if (t == 2)
-                        buffer[i] = (char)NextInt(0x80, 0x800);
-                    else if (t == 3)
-                        buffer[i] = (char)NextInt(0x800, 0xD800);
-                    else if (t == 4)
-                        buffer[i] = (char)NextInt(0xE000, 0xFFFF);
-                    else if (t == 5)
-                    {
-                        // illegal unpaired surrogate
-                        if (r.Next(2) == 0)
-                            buffer[i] = (char)NextInt(0xD800, 0xDC00);
-                        else
-                            buffer[i] = (char)NextInt(0xDC00, 0xE000);
-                    }
-                }
-                buffer[end] = ' ';
-                return 1 + end;
-            }
-
-            public virtual System.String GetString(int nTokens)
+			
+			// start is inclusive and end is exclusive
+			public virtual int NextInt(int start, int end)
+			{
+				return start + r.Next(end - start);
+			}
+			
+			internal char[] buffer = new char[100];
+			
+			private int AddUTF8Token(int start)
+			{
+				int end = start + NextInt(20);
+				if (buffer.Length < 1 + end)
+				{
+					char[] newBuffer = new char[(int) ((1 + end) * 1.25)];
+					Array.Copy(buffer, 0, newBuffer, 0, buffer.Length);
+					buffer = newBuffer;
+				}
+				
+				for (int i = start; i < end; i++)
+				{
+					int t = NextInt(6);
+					if (0 == t && i < end - 1)
+					{
+						// Make a surrogate pair
+						// High surrogate
+						buffer[i++] = (char) NextInt(0xd800, 0xdc00);
+						// Low surrogate
+						buffer[i] = (char) NextInt(0xdc00, 0xe000);
+					}
+					else if (t <= 1)
+						buffer[i] = (char) NextInt(0x80);
+					else if (2 == t)
+						buffer[i] = (char) NextInt(0x80, 0x800);
+					else if (3 == t)
+						buffer[i] = (char) NextInt(0x800, 0xd800);
+					else if (4 == t)
+						buffer[i] = (char) NextInt(0xe000, 0xffff);
+					else if (5 == t)
+					{
+						// Illegal unpaired surrogate
+						if (r.NextDouble() > 0.5)
+							buffer[i] = (char) NextInt(0xd800, 0xdc00);
+						else
+							buffer[i] = (char) NextInt(0xdc00, 0xe000);
+					}
+				}
+				buffer[end] = ' ';
+				return 1 + end;
+			}
+			
+			public virtual System.String GetString(int nTokens)
 			{
 				nTokens = nTokens != 0?nTokens:r.Next(4) + 1;
-
-                // 1/2 the time, make a random UTF-8 string
-                if (r.Next(2) == 0)
-                    return GetUTF8String(nTokens);
-
+				
+				// Half the time make a random UTF8 string
+				if (r.NextDouble() > 0.5)
+					return GetUTF8String(nTokens);
+				
 				// avoid StringBuffer because it adds extra synchronization.
 				char[] arr = new char[nTokens * 2];
 				for (int i = 0; i < nTokens; i++)
@@ -566,28 +653,28 @@
 				}
 				return new System.String(arr);
 			}
-
-            public string GetUTF8String(int nTokens)
-            {
-                int upto = 0;
-                SupportClass.CollectionsSupport.ArrayFill(buffer, (char)0);
-                for (int i = 0; i < nTokens; i++)
-                    upto = AddUTF8Token(upto);
-                return new string(buffer, 0, upto);
-            }
-
-            public string GetIdString()
-            {
-                return "" + (base_Renamed + NextInt(range));
-            }
-
-            public virtual void IndexDoc()
+			
+			public virtual System.String GetUTF8String(int nTokens)
+			{
+				int upto = 0;
+				SupportClass.CollectionsHelper.Fill(buffer, (char) 0);
+				for (int i = 0; i < nTokens; i++)
+					upto = AddUTF8Token(upto);
+				return new System.String(buffer, 0, upto);
+			}
+			
+			public virtual System.String GetIdString()
+			{
+				return System.Convert.ToString(base_Renamed + NextInt(range));
+			}
+			
+			public virtual void  IndexDoc()
 			{
 				Document d = new Document();
 				
 				System.Collections.ArrayList fields = new System.Collections.ArrayList();
-                System.String idString = GetIdString();
-				Field idField = new Field(idTerm.Field(), idString, Field.Store.YES, Field.Index.NOT_ANALYZED);
+				System.String idString = GetIdString();
+				Field idField = new Field(Lucene.Net.Index.TestStressIndexing2.idTerm.Field(), idString, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS);
 				fields.Add(idField);
 				
 				int nFields = NextInt(Lucene.Net.Index.TestStressIndexing2.maxFields);
@@ -595,57 +682,57 @@
 				{
 					
 					Field.TermVector tvVal = Field.TermVector.NO;
-                    switch (NextInt(4))
-                    {
-
-                        case 0:
-                            tvVal = Field.TermVector.NO;
-                            break;
-
-                        case 1:
-                            tvVal = Field.TermVector.YES;
-                            break;
-
-                        case 2:
-                            tvVal = Field.TermVector.WITH_POSITIONS;
-                            break;
-
-                        case 3:
-                            tvVal = Field.TermVector.WITH_POSITIONS_OFFSETS;
-                            break;
-                    }
-
-                    switch (NextInt(4))
-                    {
-
-                        case 0:
-                            fields.Add(new Field("f" + NextInt(100), GetString(1), Field.Store.YES, Field.Index.NOT_ANALYZED, tvVal));
-                            break;
-
-                        case 1:
-                            fields.Add(new Field("f" + NextInt(100), GetString(0), Field.Store.NO, Field.Index.ANALYZED, tvVal));
-                            break;
-
-                        case 2:
-                            fields.Add(new Field("f" + NextInt(100), GetString(0), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
-                            break;
-
-                        case 3:
-                            fields.Add(new Field("f" + NextInt(100), GetString(Lucene.Net.Index.TestStressIndexing2.bigFieldSize), Field.Store.YES, Field.Index.ANALYZED, tvVal));
-                            break;
-                    }
-                }
+					switch (NextInt(4))
+					{
+						
+						case 0: 
+							tvVal = Field.TermVector.NO;
+							break;
+						
+						case 1: 
+							tvVal = Field.TermVector.YES;
+							break;
+						
+						case 2: 
+							tvVal = Field.TermVector.WITH_POSITIONS;
+							break;
+						
+						case 3: 
+							tvVal = Field.TermVector.WITH_POSITIONS_OFFSETS;
+							break;
+					}
+					
+					switch (NextInt(4))
+					{
+						
+						case 0: 
+							fields.Add(new Field("f" + NextInt(100), GetString(1), Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS, tvVal));
+							break;
+						
+						case 1: 
+							fields.Add(new Field("f" + NextInt(100), GetString(0), Field.Store.NO, Field.Index.ANALYZED, tvVal));
+							break;
+						
+						case 2: 
+							fields.Add(new Field("f" + NextInt(100), GetString(0), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
+							break;
+						
+						case 3: 
+							fields.Add(new Field("f" + NextInt(100), GetString(Lucene.Net.Index.TestStressIndexing2.bigFieldSize), Field.Store.YES, Field.Index.ANALYZED, tvVal));
+							break;
+					}
+				}
 				
 				if (Lucene.Net.Index.TestStressIndexing2.sameFieldOrder)
 				{
-					fields.Sort(Lucene.Net.Index.TestStressIndexing2.fieldNameComparator);
+					SupportClass.CollectionsHelper.Sort(fields, Lucene.Net.Index.TestStressIndexing2.fieldNameComparator);
 				}
 				else
 				{
 					// random placement of id field also
-                    int index = NextInt(fields.Count);
-                    fields[0] = fields[index];
-                    fields[index] = idField;
+					int index = NextInt(fields.Count);
+					fields[0] = fields[index];
+					fields[index] = idField;
 				}
 				
 				for (int i = 0; i < fields.Count; i++)
@@ -657,20 +744,20 @@
 				docs[idString] = d;
 			}
 			
-            public void DeleteDoc()
-            {
-                string idString = GetIdString();
-                w.DeleteDocuments(idTerm.CreateTerm(idString));
-                docs.Remove(idString);
-            }
-
-            public void DeleteByQuery()
-            {
-                string idString = GetIdString();
-                w.DeleteDocuments(new TermQuery(idTerm.CreateTerm(idString)));
-                docs.Remove(idString);
-            }
-
+			public virtual void  DeleteDoc()
+			{
+				System.String idString = GetIdString();
+				w.DeleteDocuments(Lucene.Net.Index.TestStressIndexing2.idTerm.CreateTerm(idString));
+				docs.Remove(idString);
+			}
+			
+			public virtual void  DeleteByQuery()
+			{
+				System.String idString = GetIdString();
+				w.DeleteDocuments(new TermQuery(Lucene.Net.Index.TestStressIndexing2.idTerm.CreateTerm(idString)));
+				docs.Remove(idString);
+			}
+			
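DeleteDoc and DeleteByQuery above exercise IndexWriter's two deletion entry points: DeleteDocuments(Term), which resolves an exact term, and DeleteDocuments(Query), which deletes every document matching an arbitrary query. A minimal sketch of both against the same 2.9-era API used in this commit (field name and value are placeholders):

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using Lucene.Net.Store;

    class DeleteDemo
    {
        static void Main()
        {
            Directory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

            Document d = new Document();
            d.Add(new Field("id", "17", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
            writer.AddDocument(d);

            writer.DeleteDocuments(new Term("id", "17"));                // by exact term
            writer.DeleteDocuments(new TermQuery(new Term("id", "17"))); // by query
            writer.Close();
        }
    }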
 			override public void  Run()
 			{
 				try
@@ -678,33 +765,31 @@
 					r = new System.Random((System.Int32) (base_Renamed + range + Lucene.Net.Index.TestStressIndexing2.seed));
 					for (int i = 0; i < iterations; i++)
 					{
-                        int what = NextInt(100);
-                        if (what < 5)
-                        {
-                            DeleteDoc();
-                        }
-                        else if (what < 10)
-                        {
-                            DeleteByQuery();
-                        }
-                        else
-                        {
-						    IndexDoc();
-					    }
-                    }
+						int what = NextInt(100);
+						if (what < 5)
+						{
+							DeleteDoc();
+						}
+						else if (what < 10)
+						{
+							DeleteByQuery();
+						}
+						else
+						{
+							IndexDoc();
+						}
+					}
 				}
 				catch (System.Exception e)
 				{
 					System.Console.Error.WriteLine(e.StackTrace);
-					Assert.Fail(e.ToString());
+					Assert.Fail(e.ToString()); // TestCase.fail(e.ToString());
 				}
 				
-                //{DIGY - this unnecessary lines block the threads.
-                //  lock(docs) could be a solution also.
-                //lock (this)
-                //{
-                //    int generatedAux = docs.Count;
-                //}
+				lock (this)
+				{
+					int generatedAux = docs.Count;
+				}
 			}
 		}
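The lock block at the end of Run above publishes the thread's final view of the shared docs map before the runner joins. The removed comment suggested locking the map itself rather than the thread object; that variant, as a drop-in for the block above, would read:

    lock (docs)
    {
        int generatedAux = docs.Count; // synchronized read of the shared map
    }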
 		static TestStressIndexing2()
@@ -712,4 +797,4 @@
 			fieldNameComparator = new AnonymousClassComparator();
 		}
 	}
-}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTerm.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTerm.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTerm.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTerm.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -24,8 +24,8 @@
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestTerm : LuceneTestCase
+    [TestFixture]
+	public class TestTerm:LuceneTestCase
 	{
 		
 		[Test]

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermVectorsReader.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,19 +19,21 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using OffsetAttribute = Lucene.Net.Analysis.Tokenattributes.OffsetAttribute;
+using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using Token = Lucene.Net.Analysis.Token;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
 
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestTermVectorsReader : LuceneTestCase
+    [TestFixture]
+	public class TestTermVectorsReader:LuceneTestCase
 	{
 		private void  InitBlock()
 		{
@@ -51,7 +53,7 @@
 		private FieldInfos fieldInfos = new FieldInfos();
 		private static int TERM_FREQ = 3;
 		
-		public TestTermVectorsReader():base()
+		public TestTermVectorsReader(System.String s):base(s)
 		{
 			InitBlock();
 		}
@@ -96,7 +98,7 @@
 			fieldInfos.add(testFields[i], true, true, testFieldsStorePos[i], testFieldsStoreOff[i]);
 			}
 			*/
-			System.Random random = new System.Random();
+			
 			System.Array.Sort(testTerms);
 			int tokenUpto = 0;
 			for (int i = 0; i < testTerms.Length; i++)
@@ -107,7 +109,7 @@
 				for (int j = 0; j < TERM_FREQ; j++)
 				{
 					// positions are always sorted in increasing order
-					positions[i][j] = (int) (j * 10 + random.NextDouble() * 10);
+					positions[i][j] = (int) (j * 10 + (new System.Random().NextDouble()) * 10);
 					// offsets are always sorted in increasing order
 					offsets[i][j] = new TermVectorOffsetInfo(j * 10, j * 10 + testTerms[i].Length);
 					TestToken token = tokens[tokenUpto++] = new TestToken(this);
@@ -147,12 +149,8 @@
 			fieldInfos = new FieldInfos(dir, seg + "." + IndexFileNames.FIELD_INFOS_EXTENSION);
 		}
 		
-		private class MyTokenStream : TokenStream
+		private class MyTokenStream:TokenStream
 		{
-			public MyTokenStream(TestTermVectorsReader enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
 			private void  InitBlock(TestTermVectorsReader enclosingInstance)
 			{
 				this.enclosingInstance = enclosingInstance;
@@ -167,24 +165,42 @@
 				
 			}
 			internal int tokenUpto;
-			public override Token Next(Token reusableToken)
+			
+			internal TermAttribute termAtt;
+			internal PositionIncrementAttribute posIncrAtt;
+			internal OffsetAttribute offsetAtt;
+			
+			public MyTokenStream(TestTermVectorsReader enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+				posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
+				offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
+			}
+			
+			public override bool IncrementToken()
 			{
 				if (tokenUpto >= Enclosing_Instance.tokens.Length)
-					return null;
+					return false;
 				else
 				{
 					TestToken testToken = Enclosing_Instance.tokens[tokenUpto++];
-                    reusableToken.Reinit(testToken.text, testToken.startOffset, testToken.endOffset);
+					termAtt.SetTermBuffer(testToken.text);
+					offsetAtt.SetOffset(testToken.startOffset, testToken.endOffset);
 					if (tokenUpto > 1)
-						reusableToken.SetPositionIncrement(testToken.pos - Enclosing_Instance.tokens[tokenUpto - 2].pos);
+					{
+						posIncrAtt.SetPositionIncrement(testToken.pos - Enclosing_Instance.tokens[tokenUpto - 2].pos);
+					}
 					else
-                        reusableToken.SetPositionIncrement(testToken.pos + 1);
-                    return reusableToken;
+					{
+						posIncrAtt.SetPositionIncrement(testToken.pos + 1);
+					}
+					return true;
 				}
 			}
 		}
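This hunk ports MyTokenStream from the deprecated Next(Token) API to the 2.9 attribute API: the stream registers its attributes once in the constructor, and IncrementToken() fills them in and returns false at end of stream. A free-standing stream in the same style (class and field names are illustrative):

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;

    // Emits each entry of "words" once, at consecutive positions.
    class WordListTokenStream : TokenStream
    {
        private readonly string[] words;
        private int upto;
        private readonly TermAttribute termAtt;
        private readonly PositionIncrementAttribute posIncrAtt;

        public WordListTokenStream(string[] words)
        {
            this.words = words;
            termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
            posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
        }

        public override bool IncrementToken()
        {
            if (upto >= words.Length)
                return false;                     // end of stream
            termAtt.SetTermBuffer(words[upto++]); // expose the next term
            posIncrAtt.SetPositionIncrement(1);   // consecutive positions
            return true;
        }
    }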
 		
-		private class MyAnalyzer : Analyzer
+		private class MyAnalyzer:Analyzer
 		{
 			public MyAnalyzer(TestTermVectorsReader enclosingInstance)
 			{
@@ -333,7 +349,7 @@
 			//three fields, 4 terms, all terms are the same
 			Assert.IsTrue(set_Renamed.Count == 4, "set Size: " + set_Renamed.Count + " is not: " + 4);
 			//Check offsets and positions
-			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
+			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
 			{
 				TermVectorEntry tve = (TermVectorEntry) iterator.Current;
 				Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
@@ -348,7 +364,7 @@
 			//three fields, 4 terms, all terms are the same
 			Assert.IsTrue(set_Renamed.Count == 4, "set Size: " + set_Renamed.Count + " is not: " + 4);
 			//Should have offsets and positions b/c we are munging all the fields together
-			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
+			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
 			{
 				TermVectorEntry tve = (TermVectorEntry) iterator.Current;
 				Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
@@ -364,9 +380,9 @@
 			for (System.Collections.IEnumerator iterator = new System.Collections.Hashtable(map).GetEnumerator(); iterator.MoveNext(); )
 			{
 				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iterator.Current;
-				System.Collections.Generic.SortedDictionary<Object, Object> sortedSet = (System.Collections.Generic.SortedDictionary<Object, Object>)entry.Value;
+				System.Collections.Generic.SortedDictionary<Object,Object> sortedSet = (System.Collections.Generic.SortedDictionary<Object,Object>)entry.Value;
 				Assert.IsTrue(sortedSet.Count == 4, "sortedSet Size: " + sortedSet.Count + " is not: " + 4);
-				for (System.Collections.IEnumerator inner = sortedSet.Keys.GetEnumerator(); inner.MoveNext(); )
+				for (System.Collections.IEnumerator inner = sortedSet.Keys.GetEnumerator(); inner.MoveNext(); )
 				{
 					TermVectorEntry tve = (TermVectorEntry) inner.Current;
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
@@ -397,9 +413,9 @@
 			for (System.Collections.IEnumerator iterator = new System.Collections.Hashtable(map).GetEnumerator(); iterator.MoveNext(); )
 			{
 				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iterator.Current;
-				System.Collections.Generic.SortedDictionary<Object, Object> sortedSet = (System.Collections.Generic.SortedDictionary<Object, Object>)entry.Value;
+				System.Collections.Generic.SortedDictionary<Object,Object> sortedSet = (System.Collections.Generic.SortedDictionary<Object,Object>)entry.Value;
 				Assert.IsTrue(sortedSet.Count == 4, "sortedSet Size: " + sortedSet.Count + " is not: " + 4);
-				for (System.Collections.IEnumerator inner = sortedSet.Keys.GetEnumerator(); inner.MoveNext(); )
+				for (System.Collections.IEnumerator inner = sortedSet.Keys.GetEnumerator(); inner.MoveNext(); )
 				{
 					TermVectorEntry tve = (TermVectorEntry) inner.Current;
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
@@ -463,7 +479,7 @@
 				reader.Get(50, testFields[0]);
 				Assert.Fail();
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				// expected exception
 			}
@@ -475,7 +491,7 @@
 				reader.Get(50);
 				Assert.Fail();
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				// expected exception
 			}
@@ -487,14 +503,14 @@
 				TermFreqVector vector = reader.Get(0, "f50");
 				Assert.IsTrue(vector == null);
 			}
-			catch (System.IO.IOException)
+			catch (System.IO.IOException e)
 			{
 				Assert.Fail();
 			}
 		}
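The try/Fail/catch blocks above are the standard pattern for asserting that an exception is thrown. In C#, the catch clause may omit the variable name entirely when the exception object is unused, which avoids an unused-variable warning; a self-contained sketch of the pattern:

    using System;
    using System.IO;

    static class ExpectedExceptionDemo
    {
        static void ReadMissing() { throw new IOException("no such doc"); }

        static void Main()
        {
            try
            {
                ReadMissing();
                Console.WriteLine("FAIL: expected an IOException");
            }
            catch (IOException) // no variable: nothing unused to warn about
            {
                Console.WriteLine("OK: expected exception");
            }
        }
    }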
 		
 		
-		public class DocNumAwareMapper : TermVectorMapper
+		public class DocNumAwareMapper:TermVectorMapper
 		{
 			
 			public DocNumAwareMapper()

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermdocPerf.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,42 +19,48 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using Token = Lucene.Net.Analysis.Token;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
 
 namespace Lucene.Net.Index
 {
 	
-	/// <author>  yonik
-	/// </author>
 	/// <version>  $Id$
 	/// </version>
 	
-	class RepeatingTokenStream : TokenStream
+	class RepeatingTokenStream:TokenStream
 	{
 		public int num;
-		internal Token t;
+		internal TermAttribute termAtt;
+		internal System.String value_Renamed;
 		
 		public RepeatingTokenStream(System.String val)
 		{
-			t = new Token(val, 0, val.Length);
-            t.SetTermBuffer(val);
+			this.value_Renamed = val;
+			this.termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
 		}
 		
-		public override Token Next(Token reusableToken)
+		public override bool IncrementToken()
 		{
-            return --num < 0 ? null : (Token)(t.Clone());
-        }
+			num--;
+			if (num >= 0)
+			{
+				termAtt.SetTermBuffer(value_Renamed);
+				return true;
+			}
+			return false;
+		}
 	}
 	
-	[TestFixture]
-	public class TestTermdocPerf : LuceneTestCase
+	[TestFixture]
+	public class TestTermdocPerf:LuceneTestCase
 	{
 		private class AnonymousClassAnalyzer:Analyzer
 		{
@@ -95,12 +101,12 @@
 		
 		internal virtual void  AddDocs(Directory dir, int ndocs, System.String field, System.String val, int maxTF, float percentDocs)
 		{
-			System.Random random = new System.Random((System.Int32) 0);
+			System.Random random = NewRandom();
 			RepeatingTokenStream ts = new RepeatingTokenStream(val);
 			
 			Analyzer analyzer = new AnonymousClassAnalyzer(random, percentDocs, ts, maxTF, this);
 			
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field(field, val, Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
 			IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(100);
@@ -116,20 +122,20 @@
 		}
 		
 		
-		public virtual int DoTest(int iter, int ndocs, int maxTF, float percentDocs)
+		public virtual int doTest(int iter, int ndocs, int maxTF, float percentDocs)
 		{
 			Directory dir = new RAMDirectory();
 			
-			long start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+			long start = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
 			AddDocs(dir, ndocs, "foo", "val", maxTF, percentDocs);
-			long end = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+			long end = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
 			System.Console.Out.WriteLine("milliseconds for creation of " + ndocs + " docs = " + (end - start));
 			
 			IndexReader reader = IndexReader.Open(dir);
 			TermEnum tenum = reader.Terms(new Term("foo", "val"));
 			TermDocs tdocs = reader.TermDocs();
 			
-			start = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+			start = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
 			
 			int ret = 0;
 			for (int i = 0; i < iter; i++)
@@ -141,7 +147,7 @@
 				}
 			}
 			
-			end = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+			end = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
 			System.Console.Out.WriteLine("milliseconds for " + iter + " TermDocs iteration: " + (end - start));
 			
 			return ret;
@@ -151,7 +157,7 @@
 		public virtual void  TestTermDocPerf()
 		{
 			// performance test for 10% of documents containing a term
-			// DoTest(100000, 10000,3,.1f);
+			// doTest(100000, 10000,3,.1f);
 		}
 	}
 }
\ No newline at end of file
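On the timing code above: arithmetic on DateTime ticks works, but System.Diagnostics.Stopwatch is the idiomatic way to measure elapsed milliseconds in .NET. A minimal sketch (the loop is a stand-in for the indexing work):

    using System;
    using System.Diagnostics;

    static class TimingDemo
    {
        static void Main()
        {
            Stopwatch sw = Stopwatch.StartNew();
            long sum = 0;
            for (int i = 0; i < 1000000; i++)
                sum += i; // stand-in for AddDocs / TermDocs iteration
            sw.Stop();
            Console.WriteLine("milliseconds: " + sw.ElapsedMilliseconds);
        }
    }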

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestThreadedOptimize.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -14,10 +14,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
@@ -26,16 +29,14 @@
 using English = Lucene.Net.Util.English;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using _TestUtil = Lucene.Net.Util._TestUtil;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestThreadedOptimize : LuceneTestCase
+    [TestFixture]
+	public class TestThreadedOptimize:LuceneTestCase
 	{
-		private class AnonymousClassThread : SupportClass.ThreadClass
+		private class AnonymousClassThread:SupportClass.ThreadClass
 		{
 			public AnonymousClassThread(Lucene.Net.Index.IndexWriter writerFinal, int iFinal, int iterFinal, TestThreadedOptimize enclosingInstance)
 			{
@@ -81,7 +82,7 @@
 				}
 				catch (System.Exception t)
 				{
-					Enclosing_Instance.SetFailed();
+					Enclosing_Instance.setFailed();
 					System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": hit exception");
 					System.Console.Out.WriteLine(t.StackTrace);
 				}
@@ -101,12 +102,12 @@
 		
 		private bool failed;
 		
-		private void  SetFailed()
+		private void  setFailed()
 		{
 			failed = true;
 		}
 		
-		public virtual void  RunTest(Directory directory, bool autoCommit, MergeScheduler merger)
+		public virtual void  runTest(Directory directory, bool autoCommit, MergeScheduler merger)
 		{
 			
 			IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
@@ -176,25 +177,25 @@
 		FSDirectory.
 		*/
 		[Test]
-		public virtual void  TestThreadedOptimize_Renamed_Method()
+		public virtual void  TestThreadedOptimize_Renamed()
 		{
 			Directory directory = new MockRAMDirectory();
-            RunTest(directory, false, new SerialMergeScheduler());
-            RunTest(directory, true, new SerialMergeScheduler());
-			RunTest(directory, false, new ConcurrentMergeScheduler());
-			RunTest(directory, true, new ConcurrentMergeScheduler());
+			runTest(directory, false, new SerialMergeScheduler());
+			runTest(directory, true, new SerialMergeScheduler());
+			runTest(directory, false, new ConcurrentMergeScheduler());
+			runTest(directory, true, new ConcurrentMergeScheduler());
 			directory.Close();
-
-            System.String tempDir = SupportClass.AppSettings.Get("tempDir", "");
+			
+			System.String tempDir = SupportClass.AppSettings.Get("tempDir", "");
 			if (tempDir == null)
 				throw new System.IO.IOException("tempDir undefined, cannot run test");
 			
 			System.String dirName = tempDir + "/luceneTestThreadedOptimize";
-			directory = FSDirectory.GetDirectory(dirName);
-            RunTest(directory, false, new SerialMergeScheduler());
-            RunTest(directory, true, new SerialMergeScheduler());
-			RunTest(directory, false, new ConcurrentMergeScheduler());
-			RunTest(directory, true, new ConcurrentMergeScheduler());
+			directory = FSDirectory.Open(new System.IO.FileInfo(dirName));
+			runTest(directory, false, new SerialMergeScheduler());
+			runTest(directory, true, new SerialMergeScheduler());
+			runTest(directory, false, new ConcurrentMergeScheduler());
+			runTest(directory, true, new ConcurrentMergeScheduler());
 			directory.Close();
 			_TestUtil.RmDir(dirName);
 		}