Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC

svn commit: r832486 [13/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyProxSkipping.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestLazyProxSkipping.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyProxSkipping.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyProxSkipping.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -14,19 +14,20 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using IndexInput = Lucene.Net.Store.IndexInput;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using Searcher = Lucene.Net.Search.Searcher;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
@@ -36,8 +37,8 @@
 	/// <summary> Tests lazy skipping on the proximity file.
 	/// 
 	/// </summary>
-	[TestFixture]
-	public class TestLazyProxSkipping : LuceneTestCase
+    [TestFixture]
+	public class TestLazyProxSkipping:LuceneTestCase
 	{
 		private Searcher searcher;
 		private int seeksCounter = 0;
@@ -47,16 +48,49 @@
 		private System.String term2 = "yy";
 		private System.String term3 = "zz";
 		
+		[Serializable]
+		private class SeekCountingDirectory:RAMDirectory
+		{
+			public SeekCountingDirectory(TestLazyProxSkipping enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestLazyProxSkipping enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestLazyProxSkipping enclosingInstance;
+			public TestLazyProxSkipping Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public override IndexInput OpenInput(System.String name)
+			{
+				IndexInput ii = base.OpenInput(name);
+				if (name.EndsWith(".prx"))
+				{
+					// we decorate the proxStream with a wrapper class that allows us to count the number of calls to seek()
+					ii = new SeeksCountingStream(enclosingInstance, ii);
+				}
+				return ii;
+			}
+		}
+		
 		private void  CreateIndex(int numHits)
 		{
 			int numDocs = 500;
 			
-			Directory directory = new RAMDirectory();
+			Directory directory = new SeekCountingDirectory(this);
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetUseCompoundFile(false);
 			writer.SetMaxBufferedDocs(10);
 			for (int i = 0; i < numDocs; i++)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				System.String content;
 				if (i % (numDocs / numHits) == 0)
 				{
@@ -82,11 +116,7 @@
 			writer.Optimize();
 			writer.Close();
 			
-			// the index is a single segment, thus IndexReader.open() returns an instance of SegmentReader
-			SegmentReader reader = (SegmentReader) IndexReader.Open(directory);
-			
-			// we decorate the proxStream with a wrapper class that allows to count the number of calls of seek()
-			reader.ProxStream_ForNUnitTest = new SeeksCountingStream(this, reader.ProxStream_ForNUnitTest);
+			SegmentReader reader = SegmentReader.GetOnlySegmentReader(directory);
 			
 			this.searcher = new IndexSearcher(reader);
 		}
@@ -95,8 +125,8 @@
 		{
 			// create PhraseQuery "term1 term2" and search
 			PhraseQuery pq = new PhraseQuery();
-			pq.Add(new Term(this.field, this.term1));
-			pq.Add(new Term(this.field, this.term2));
+			pq.Add(new Term(this.field, this.term1));
+			pq.Add(new Term(this.field, this.term2));
 			return this.searcher.Search(pq, null, 1000).scoreDocs;
 		}
 		
@@ -109,6 +139,7 @@
 			Assert.AreEqual(numHits, hits.Length);
 			
 			// check if the number of calls of seek() does not exceed the number of hits
+			Assert.IsTrue(this.seeksCounter > 0);
 			Assert.IsTrue(this.seeksCounter <= numHits + 1);
 		}
 		
@@ -124,7 +155,7 @@
 		public virtual void  TestSeek()
 		{
 			Directory directory = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			for (int i = 0; i < 10; i++)
 			{
 				Document doc = new Document();
@@ -154,7 +185,7 @@
 		
 		// Simply extends IndexInput in a way that we are able to count the number
 		// of invocations of seek()
-		internal class SeeksCountingStream : IndexInput, System.ICloneable
+		internal class SeeksCountingStream:IndexInput, System.ICloneable
 		{
 			private void  InitBlock(TestLazyProxSkipping enclosingInstance)
 			{

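Note on the hunk above: the commit replaces the old reader.ProxStream_ForNUnitTest hook with a Directory-level decorator. SeekCountingDirectory wraps every *.prx input so the test can observe lazy proximity skipping without reaching into SegmentReader. A condensed sketch of the decorator idea (not part of the commit; it assumes the 2.9-era IndexInput abstract members, and the test's real SeeksCountingStream additionally overrides Clone()):

    // hypothetical condensed form of a seek-counting IndexInput decorator
    internal class CountingInput : Lucene.Net.Store.IndexInput
    {
        private Lucene.Net.Store.IndexInput input;
        public int seeks = 0;
        public CountingInput(Lucene.Net.Store.IndexInput input) { this.input = input; }
        public override void Seek(long pos) { seeks++; input.Seek(pos); } // the only instrumented call
        public override byte ReadByte() { return input.ReadByte(); }
        public override void ReadBytes(byte[] b, int offset, int len) { input.ReadBytes(b, offset, len); }
        public override long GetFilePointer() { return input.GetFilePointer(); }
        public override long Length() { return input.Length(); }
        public override void Close() { input.Close(); }
    }
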
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiLevelSkipList.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestMultiLevelSkipList.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiLevelSkipList.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiLevelSkipList.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,16 +19,18 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
+using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using Index = Lucene.Net.Documents.Field.Index;
+using Store = Lucene.Net.Documents.Field.Store;
 using IndexInput = Lucene.Net.Store.IndexInput;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
-using Token = Lucene.Net.Analysis.Token;
-using TokenFilter = Lucene.Net.Analysis.TokenFilter;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
 
 namespace Lucene.Net.Index
 {
@@ -40,14 +42,14 @@
 	/// testcases.
 	/// 
 	/// </summary>
-	[TestFixture]
-	public class TestMultiLevelSkipList : LuceneTestCase
+    [TestFixture]
+	public class TestMultiLevelSkipList:LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestSimpleSkip()
 		{
 			RAMDirectory dir = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			Term term = new Term("test", "a");
 			for (int i = 0; i < 5000; i++)
 			{
@@ -59,9 +61,9 @@
 			writer.Optimize();
 			writer.Close();
 			
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = SegmentReader.GetOnlySegmentReader(dir);
 			SegmentTermPositions tp = (SegmentTermPositions) reader.TermPositions();
-			tp.FreqStream_ForNUnitTest = new CountingStream(this, tp.FreqStream_ForNUnitTest);
+			tp.freqStream_ForNUnit = new CountingStream(this, tp.freqStream_ForNUnit);
 			
 			for (int i = 0; i < 2; i++)
 			{
@@ -94,7 +96,7 @@
 			Assert.AreEqual((byte) target, b[0], "Wrong payload for the target " + target + ": " + b[0]);
 		}
 		
-		private class PayloadAnalyzer : Analyzer
+		private class PayloadAnalyzer:Analyzer
 		{
 			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
@@ -102,23 +104,25 @@
 			}
 		}
 		
-		private class PayloadFilter : TokenFilter
+		private class PayloadFilter:TokenFilter
 		{
 			internal static int count = 0;
 			
+			internal PayloadAttribute payloadAtt;
+			
 			protected internal PayloadFilter(TokenStream input):base(input)
 			{
+				payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
 			}
 			
-			public override Token Next(Token reusableToken)
+			public override bool IncrementToken()
 			{
-                System.Diagnostics.Debug.Assert(reusableToken != null);
-				Token nextToken = input.Next(reusableToken);
-				if (nextToken != null)
+				bool hasNext = input.IncrementToken();
+				if (hasNext)
 				{
-					nextToken.SetPayload(new Payload(new byte[]{(byte) count++}));
+					payloadAtt.SetPayload(new Payload(new byte[]{(byte) count++}));
 				}
-				return nextToken;
+				return hasNext;
 			}
 		}
 		
@@ -126,7 +130,7 @@
 		
 		// Simply extends IndexInput in a way that we are able to count the number
 		// of bytes read
-		internal class CountingStream : IndexInput, System.ICloneable
+		internal class CountingStream:IndexInput, System.ICloneable
 		{
 			private void  InitBlock(TestMultiLevelSkipList enclosingInstance)
 			{

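Note on the PayloadFilter change above: it is the 2.9 token API migration in miniature. Next(Token) gives way to IncrementToken(), and per-token state such as the payload moves into attributes obtained through AddAttribute(). A hedged consumption-side sketch (the input text is illustrative; GetPayload() is assumed as the read counterpart of the SetPayload() call the filter uses):

    // reading tokens through the attribute-based API
    TokenStream ts = new PayloadFilter(new LowerCaseTokenizer(new System.IO.StringReader("a a a")));
    PayloadAttribute payloadAtt = (PayloadAttribute) ts.AddAttribute(typeof(PayloadAttribute));
    while (ts.IncrementToken())
    {
        Payload payload = payloadAtt.GetPayload(); // one counter byte per token, set by the filter
    }
    ts.Close();
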
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestMultiReader.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,15 +19,13 @@
 
 using NUnit.Framework;
 
-using Directory = Lucene.Net.Store.Directory;
-
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestMultiReader : TestMultiSegmentReader
+    [TestFixture]
+	public class TestMultiReader:TestDirectoryReader
 	{
-		public TestMultiReader():base()
+		public TestMultiReader(System.String s):base(s)
 		{
 		}
 		

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestNRTReaderWithThreads.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestNRTReaderWithThreads.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestNRTReaderWithThreads.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestNRTReaderWithThreads.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,146 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using HeavyAtomicInt = Lucene.Net.Index.TestIndexWriterReader.HeavyAtomicInt;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Index
+{
+	
+    [TestFixture]
+	public class TestNRTReaderWithThreads:LuceneTestCase
+	{
+		internal System.Random random = new System.Random();
+		internal HeavyAtomicInt seq = new HeavyAtomicInt(1);
+		
+        [Test]
+		public virtual void  TestIndexing()
+		{
+			Directory mainDir = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetUseCompoundFile(false);
+			IndexReader reader = writer.GetReader(); // start pooling readers
+			reader.Close();
+			writer.SetMergeFactor(2);
+			writer.SetMaxBufferedDocs(10);
+			RunThread[] indexThreads = new RunThread[4];
+			for (int x = 0; x < indexThreads.Length; x++)
+			{
+				indexThreads[x] = new RunThread(this, x % 2, writer);
+				indexThreads[x].Name = "Thread " + x;
+				indexThreads[x].Start();
+			}
+			// DateTime.Now.Millisecond is only the 0-999 millisecond component; use Ticks for elapsed wall-clock time
+			long startTime = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
+			long duration = 5 * 1000;
+			while ((System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond - startTime) < duration)
+			{
+				System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 100)); // 10,000 ticks per ms * 100 = 100 ms
+			}
+			int delCount = 0;
+			int addCount = 0;
+			for (int x = 0; x < indexThreads.Length; x++)
+			{
+				indexThreads[x].run_Renamed_Field = false;
+				Assert.IsTrue(indexThreads[x].ex == null);
+				addCount += indexThreads[x].addCount;
+				delCount += indexThreads[x].delCount;
+			}
+			for (int x = 0; x < indexThreads.Length; x++)
+			{
+				indexThreads[x].Join();
+			}
+			//System.out.println("addCount:"+addCount);
+			//System.out.println("delCount:"+delCount);
+			writer.Close();
+			mainDir.Close();
+		}
+		
+		public class RunThread:SupportClass.ThreadClass
+		{
+			private void  InitBlock(TestNRTReaderWithThreads enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestNRTReaderWithThreads enclosingInstance;
+			public TestNRTReaderWithThreads Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal IndexWriter writer;
+			internal bool run_Renamed_Field = true;
+			internal System.Exception ex;
+			internal int delCount = 0;
+			internal int addCount = 0;
+			internal int type;
+			
+			public RunThread(TestNRTReaderWithThreads enclosingInstance, int type, IndexWriter writer)
+			{
+				InitBlock(enclosingInstance);
+				this.type = type;
+				this.writer = writer;
+			}
+			
+			override public void  Run()
+			{
+				try
+				{
+					while (run_Renamed_Field)
+					{
+						//int n = random.nextInt(2);
+						if (type == 0)
+						{
+							int i = Enclosing_Instance.seq.AddAndGet(1);
+							Document doc = TestIndexWriterReader.CreateDocument(i, "index1", 10);
+							writer.AddDocument(doc);
+							addCount++;
+						}
+						else if (type == 1)
+						{
+							// we may or may not delete because the term may not exist,
+							// however we're opening and closing the reader rapidly
+							IndexReader reader = writer.GetReader();
+							int id = Enclosing_Instance.random.Next(Enclosing_Instance.seq.IntValue());
+							Term term = new Term("id", System.Convert.ToString(id));
+							int count = TestIndexWriterReader.Count(term, reader);
+							writer.DeleteDocuments(term);
+							reader.Close();
+							delCount += count;
+						}
+					}
+				}
+				catch (System.Exception ex)
+				{
+					System.Console.Out.WriteLine(ex.StackTrace);
+					this.ex = ex;
+					run_Renamed_Field = false;
+				}
+			}
+		}
+	}
+}
\ No newline at end of file
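
Note on the file added above: TestNRTReaderWithThreads exercises the new near-real-time path from several threads at once. IndexWriter.GetReader() returns a pooled reader that sees uncommitted changes. The single-threaded shape of the pattern, as a sketch (the document construction reuses the helper this test already calls):

    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.AddDocument(TestIndexWriterReader.CreateDocument(1, "index1", 10)); // not yet committed
    IndexReader reader = writer.GetReader(); // sees the uncommitted document
    // ... search against reader ...
    reader.Close();  // close every reader obtained from GetReader(); the writer keeps pooling
    writer.Close();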

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestNorms.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestNorms.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestNorms.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestNorms.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,14 +19,14 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Index = Lucene.Net.Documents.Field.Index;
 using Store = Lucene.Net.Documents.Field.Store;
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
 using Similarity = Lucene.Net.Search.Similarity;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
@@ -37,12 +37,12 @@
 	/// <summary> Test that norms info is preserved during index life - including
 	/// separate norms, addDocument, addIndexes, optimize.
 	/// </summary>
-	[TestFixture]
-	public class TestNorms : LuceneTestCase
+    [TestFixture]
+	public class TestNorms:LuceneTestCase
 	{
 		
 		[Serializable]
-		private class SimilarityOne : DefaultSimilarity
+		private class SimilarityOne:DefaultSimilarity
 		{
 			public SimilarityOne(TestNorms enclosingInstance)
 			{
@@ -77,9 +77,12 @@
 		private float lastNorm = 0;
 		private float normDelta = (float) 0.001;
 		
+		public TestNorms(System.String s):base(s)
+		{
+		}
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			similarityOne = new SimilarityOne(this);
@@ -103,7 +106,7 @@
 			
 			// test with a single index: index1
 			System.IO.FileInfo indexDir1 = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucenetestindex1"));
-			Directory dir1 = FSDirectory.GetDirectory(indexDir1);
+			Directory dir1 = FSDirectory.Open(indexDir1);
 			
 			norms = new System.Collections.ArrayList();
 			modifiedNorms = new System.Collections.ArrayList();
@@ -121,17 +124,17 @@
 			numDocNorms = 0;
 			
 			System.IO.FileInfo indexDir2 = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucenetestindex2"));
-			Directory dir2 = FSDirectory.GetDirectory(indexDir2);
+			Directory dir2 = FSDirectory.Open(indexDir2);
 			
 			CreateIndex(dir2);
 			DoTestNorms(dir2);
 			
 			// add index1 and index2 to a third index: index3
 			System.IO.FileInfo indexDir3 = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucenetestindex3"));
-			Directory dir3 = FSDirectory.GetDirectory(indexDir3);
+			Directory dir3 = FSDirectory.Open(indexDir3);
 			
 			CreateIndex(dir3);
-            IndexWriter iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
 			iw.SetMaxBufferedDocs(5);
 			iw.SetMergeFactor(3);
 			iw.AddIndexes(new Directory[]{dir1, dir2});
@@ -148,7 +151,7 @@
 			DoTestNorms(dir3);
 			
 			// now with optimize
-            iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
+			iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
 			iw.SetMaxBufferedDocs(5);
 			iw.SetMergeFactor(3);
 			iw.Optimize();
@@ -177,7 +180,7 @@
 		
 		private void  CreateIndex(Directory dir)
 		{
-            IndexWriter iw = new IndexWriter(dir, anlzr, true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw = new IndexWriter(dir, anlzr, true, IndexWriter.MaxFieldLength.LIMITED);
 			iw.SetMaxBufferedDocs(5);
 			iw.SetMergeFactor(3);
 			iw.SetSimilarity(similarityOne);
@@ -226,7 +229,7 @@
 		
 		private void  AddDocs(Directory dir, int ndocs, bool compound)
 		{
-            IndexWriter iw = new IndexWriter(dir, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw = new IndexWriter(dir, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
 			iw.SetMaxBufferedDocs(5);
 			iw.SetMergeFactor(3);
 			iw.SetSimilarity(similarityOne);
@@ -239,13 +242,13 @@
 		}
 		
 		// create the next document
-		private Lucene.Net.Documents.Document NewDoc()
+		private Document NewDoc()
 		{
-			Lucene.Net.Documents.Document d = new Lucene.Net.Documents.Document();
+			Document d = new Document();
 			float boost = NextNorm();
 			for (int i = 0; i < 10; i++)
 			{
-				Field f = new Field("f" + i, "v" + i, Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.NOT_ANALYZED);
+				Field f = new Field("f" + i, "v" + i, Field.Store.NO, Field.Index.NOT_ANALYZED);
 				f.SetBoost(boost);
 				d.Add(f);
 			}
@@ -272,7 +275,7 @@
 			modifiedNorms.Insert(numDocNorms, (float) norm);
 			//System.out.println("creating norm("+numDocNorms+"): "+norm);
 			numDocNorms++;
-			lastNorm = (norm > 10 ? 0 : norm); //there's a limit to how many distinct values can be stored in a single byte
+			lastNorm = (norm > 10?0:norm); //there's a limit to how many distinct values can be stored in a single byte
 			return norm;
 		}
 	}
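
Note on the TestNorms changes above: they also track a 2.9 deprecation. The FSDirectory.GetDirectory() factory is replaced by FSDirectory.Open(), which picks an FSDirectory implementation for the platform. Side by side, as a sketch:

    System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucenetestindex1"));
    Directory dirOld = FSDirectory.GetDirectory(indexDir); // pre-2.9 factory, deprecated
    Directory dirNew = FSDirectory.Open(indexDir);         // 2.9 replacement used throughout this commit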

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestOmitTf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestOmitTf.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestOmitTf.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestOmitTf.cs Tue Nov  3 18:06:27 2009
@@ -1,13 +1,13 @@
-/**
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -15,359 +15,534 @@
  * limitations under the License.
  */
 
+using System;
+
 using NUnit.Framework;
 
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using _TestUtil = Lucene.Net.Util._TestUtil;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using BooleanQuery = Lucene.Net.Search.BooleanQuery;
-using HitCollector = Lucene.Net.Search.HitCollector;
+using Collector = Lucene.Net.Search.Collector;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using Scorer = Lucene.Net.Search.Scorer;
 using Searcher = Lucene.Net.Search.Searcher;
 using Similarity = Lucene.Net.Search.Similarity;
 using TermQuery = Lucene.Net.Search.TermQuery;
 using Occur = Lucene.Net.Search.BooleanClause.Occur;
-using Directory = Lucene.Net.Store.Directory;
-using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
+	
+	
     [TestFixture]
-    public class TestOmitTf : LuceneTestCase
-    {
-
-        public class SimpleSimilarity : Similarity
-        {
-            override public float LengthNorm(string field, int numTerms) { return 1.0f; }
-            override public float QueryNorm(float sumOfSquaredWeights) { return 1.0f; }
-            override public float Tf(float freq) { return freq; }
-            override public float SloppyFreq(int distance) { return 2.0f; }
-            override public float Idf(System.Collections.ICollection terms, Searcher searcher) { return 1.0f; }
-            override public float Idf(int docFreq, int numDocs) { return 1.0f; }
-            override public float Coord(int overlap, int maxOverlap) { return 1.0f; }
-        }
-
-        // Tests whether the DocumentWriter correctly enable the
-        // omitTf bit in the FieldInfo
-        [Test]
-        public void TestOmitTf_Renamed()
-        {
-            Directory ram = new MockRAMDirectory();
-            Analyzer analyzer = new StandardAnalyzer();
-            IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-            Document d = new Document();
-
-            // this field will have Tf
-            Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
-            d.Add(f1);
-
-            // this field will NOT have Tf
-            Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
-            f2.SetOmitTf(true);
-            d.Add(f2);
-
-            writer.AddDocument(d);
-            writer.Optimize();
-            // now we add another document which has term freq for field f2 and not for f1 and verify if the SegmentMerger
-            // keep things constant
-            d = new Document();
-
-            // Reverese
-            f1.SetOmitTf(true);
-            d.Add(f1);
-
-            f2.SetOmitTf(false);
-            d.Add(f2);
-
-            writer.AddDocument(d);
-            // force merge
-            writer.Optimize();
-            // flush
-            writer.Close();
-            _TestUtil.CheckIndex(ram);
-
-            // only one segment in the index, so we can cast to SegmentReader
-            SegmentReader reader = (SegmentReader)IndexReader.Open(ram);
-            FieldInfos fi = reader.FieldInfos();
-            Assert.IsTrue(fi.FieldInfo("f1").omitTf_ForNUnitTest, "OmitTf field bit should be set.");
-            Assert.IsTrue(fi.FieldInfo("f2").omitTf_ForNUnitTest, "OmitTf field bit should be set.");
-
-            reader.Close();
-            ram.Close();
-        }
-
-        // Tests whether merging of docs that have different
-        // omitTf for the same field works
-        [Test]
-        public void TestMixedMerge()
-        {
-            Directory ram = new MockRAMDirectory();
-            Analyzer analyzer = new StandardAnalyzer();
-            IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-            writer.SetMaxBufferedDocs(3);
-            writer.SetMergeFactor(2);
-            Document d = new Document();
-
-            // this field will have Tf
-            Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
-            d.Add(f1);
-
-            // this field will NOT have Tf
-            Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
-            f2.SetOmitTf(true);
-            d.Add(f2);
-
-            for (int i = 0; i < 30; i++)
-                writer.AddDocument(d);
-
-            // now we add another document which has term freq for field f2 and not for f1 and verify if the SegmentMerger
-            // keep things constant
-            d = new Document();
-
-            // Reverese
-            f1.SetOmitTf(true);
-            d.Add(f1);
-
-            f2.SetOmitTf(false);
-            d.Add(f2);
-
-            for (int i = 0; i < 30; i++)
-                writer.AddDocument(d);
-
-            // force merge
-            writer.Optimize();
-            // flush
-            writer.Close();
-
-            _TestUtil.CheckIndex(ram);
-
-            // only one segment in the index, so we can cast to SegmentReader
-            SegmentReader reader = (SegmentReader)IndexReader.Open(ram);
-            FieldInfos fi = reader.FieldInfos();
-            Assert.IsTrue(fi.FieldInfo("f1").omitTf_ForNUnitTest, "OmitTf field bit should be set.");
-            Assert.IsTrue(fi.FieldInfo("f2").omitTf_ForNUnitTest, "OmitTf field bit should be set.");
-
-            reader.Close();
-            ram.Close();
-        }
-
-        // Make sure first adding docs that do not omitTf for
-        // field X, then adding docs that do omitTf for that same
-        // field, 
-        [Test]
-        public void TestMixedRAM()
-        {
-            Directory ram = new MockRAMDirectory();
-            Analyzer analyzer = new StandardAnalyzer();
-            IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-            writer.SetMaxBufferedDocs(10);
-            writer.SetMergeFactor(2);
-            Document d = new Document();
-
-            // this field will have Tf
-            Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
-            d.Add(f1);
-
-            // this field will NOT have Tf
-            Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
-            d.Add(f2);
-
-            for (int i = 0; i < 5; i++)
-                writer.AddDocument(d);
-
-            f2.SetOmitTf(true);
-
-            for (int i = 0; i < 20; i++)
-                writer.AddDocument(d);
-
-            // force merge
-            writer.Optimize();
-
-            // flush
-            writer.Close();
-
-            _TestUtil.CheckIndex(ram);
-
-            // only one segment in the index, so we can cast to SegmentReader
-            SegmentReader reader = (SegmentReader)IndexReader.Open(ram);
-            FieldInfos fi = reader.FieldInfos();
-            Assert.IsTrue(!fi.FieldInfo("f1").omitTf_ForNUnitTest, "OmitTf field bit should not be set.");
-            Assert.IsTrue(fi.FieldInfo("f2").omitTf_ForNUnitTest, "OmitTf field bit should be set.");
-
-            reader.Close();
-            ram.Close();
-        }
-
-        private void AssertNoPrx(Directory dir)
-        {
-            string[] files = dir.List();
-            for (int i = 0; i < files.Length; i++)
-                Assert.IsFalse(files[i].EndsWith(".prx"));
-        }
-
-        // Verifies no *.prx exists when all fields omit term freq:
-        [Test]
-        public void TestNoPrxFile()
-        {
-            Directory ram = new MockRAMDirectory();
-            Analyzer analyzer = new StandardAnalyzer();
-            IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-            writer.SetMaxBufferedDocs(3);
-            writer.SetMergeFactor(2);
-            writer.SetUseCompoundFile(false);
-            Document d = new Document();
-
-            Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
-            f1.SetOmitTf(true);
-            d.Add(f1);
-
-            for (int i = 0; i < 30; i++)
-                writer.AddDocument(d);
-
-            writer.Commit();
-
-            AssertNoPrx(ram);
-
-            // force merge
-            writer.Optimize();
-            // flush
-            writer.Close();
-
-            AssertNoPrx(ram);
-            _TestUtil.CheckIndex(ram);
-            ram.Close();
-        }
-
-        // Test scores with one field with Term Freqs and one without, otherwise with equal content 
-        [Test]
-        public void TestBasic()
-        {
-            Directory dir = new MockRAMDirectory();
-            Analyzer analyzer = new StandardAnalyzer();
-            IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-            writer.SetMergeFactor(2);
-            writer.SetMaxBufferedDocs(2);
-            writer.SetSimilarity(new SimpleSimilarity());
-
-
-            System.Text.StringBuilder sb = new System.Text.StringBuilder(265);
-            string term = "term";
-            for (int i = 0; i < 30; i++)
-            {
-                Document d = new Document();
-                sb.Append(term).Append(" ");
-                string content = sb.ToString();
-                Field noTf = new Field("noTf", content + (i % 2 == 0 ? "" : " notf"), Field.Store.NO, Field.Index.ANALYZED);
-                noTf.SetOmitTf(true);
-                d.Add(noTf);
-
-                Field tf = new Field("tf", content + (i % 2 == 0 ? " tf" : ""), Field.Store.NO, Field.Index.ANALYZED);
-                d.Add(tf);
-
-                writer.AddDocument(d);
-                //System.out.println(d);
-            }
-
-            writer.Optimize();
-            // flush
-            writer.Close();
-            _TestUtil.CheckIndex(dir);
-
-            /*
-             * Verify the index
-             */
-            Searcher searcher = new IndexSearcher(dir);
-            searcher.SetSimilarity(new SimpleSimilarity());
-
-            Term a = new Term("noTf", term);
-            Term b = new Term("tf", term);
-            Term c = new Term("noTf", "notf");
-            Term d2 = new Term("tf", "tf");
-            TermQuery q1 = new TermQuery(a);
-            TermQuery q2 = new TermQuery(b);
-            TermQuery q3 = new TermQuery(c);
-            TermQuery q4 = new TermQuery(d2);
-
-
-            searcher.Search(q1, new AnonymousCountingHitCollector1());
-            searcher.Search(q2, new AnonymousCountingHitCollector2());
-            searcher.Search(q3, new AnonymousCountingHitCollector3());
-            searcher.Search(q4, new AnonymousCountingHitCollector4());
-
-            BooleanQuery bq = new BooleanQuery();
-            bq.Add(q1, Occur.MUST);
-            bq.Add(q4, Occur.MUST);
-
-            searcher.Search(bq, new AnonymousCountingHitCollector5());
-            Assert.IsTrue(15 == CountingHitCollector.GetCount());
-
-            searcher.Close();
-            dir.Close();
-        }
-
-        public class CountingHitCollector : HitCollector
-        {
-            static int count = 0;
-            static int sum = 0;
-            internal CountingHitCollector() { count = 0; sum = 0; }
-            override public void Collect(int doc, float score)
-            {
-                count++;
-                sum += doc;  // use it to avoid any possibility of being optimized away
-            }
-
-            public static int GetCount() { return count; }
-            public static int GetSum() { return sum; }
-        }
-
-        public class AnonymousCountingHitCollector1 : CountingHitCollector
-        {
-            override public void Collect(int doc, float score)
-            {
-                //System.out.println("Q1: Doc=" + doc + " score=" + score);
-                Assert.IsTrue(score == 1.0f);
-                base.Collect(doc, score);
-            }
-        }
-        public class AnonymousCountingHitCollector2 : CountingHitCollector
-        {
-            override public void Collect(int doc, float score)
-            {
-                //System.out.println("Q1: Doc=" + doc + " score=" + score);
-                Assert.IsTrue(score == 1.0f+doc);
-                base.Collect(doc, score);
-            }
-        }
-        public class AnonymousCountingHitCollector3 : CountingHitCollector
-        {
-            override public void Collect(int doc, float score)
-            {
-                //System.out.println("Q1: Doc=" + doc + " score=" + score);
-                Assert.IsTrue(score == 1.0f);
-                Assert.IsFalse(doc % 2 == 0);
-                base.Collect(doc, score);
-            }
-        }
-        public class AnonymousCountingHitCollector4 : CountingHitCollector
-        {
-            override public void Collect(int doc, float score)
-            {
-                //System.out.println("Q1: Doc=" + doc + " score=" + score);
-                Assert.IsTrue(score == 1.0f);
-                Assert.IsTrue(doc % 2 == 0);
-                base.Collect(doc, score);
-            }
-        };
-        public class AnonymousCountingHitCollector5 : CountingHitCollector
-        {
-            override public void Collect(int doc, float score)
-            {
-                //System.out.println("BQ: Doc=" + doc + " score=" + score);
-                base.Collect(doc, score);
-            }
-        }
-    }
-}
+	public class TestOmitTf:LuceneTestCase
+	{
+		private class AnonymousClassCountingHitCollector:CountingHitCollector
+		{
+			public AnonymousClassCountingHitCollector(TestOmitTf enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestOmitTf enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestOmitTf enclosingInstance;
+			public TestOmitTf Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
+			{
+				//System.out.println("Q1: Doc=" + doc + " score=" + score);
+				float score = scorer.Score();
+				Assert.IsTrue(score == 1.0f);
+				base.Collect(doc);
+			}
+		}
+		
+		private class AnonymousClassCountingHitCollector1:CountingHitCollector
+		{
+			public AnonymousClassCountingHitCollector1(TestOmitTf enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestOmitTf enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestOmitTf enclosingInstance;
+			public TestOmitTf Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
+			{
+				//System.out.println("Q2: Doc=" + doc + " score=" + score);
+				float score = scorer.Score();
+				Assert.IsTrue(score == 1.0f + doc);
+				base.Collect(doc);
+			}
+		}
+		
+		private class AnonymousClassCountingHitCollector2:CountingHitCollector
+		{
+			public AnonymousClassCountingHitCollector2(TestOmitTf enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestOmitTf enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestOmitTf enclosingInstance;
+			public TestOmitTf Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
+			{
+				//System.out.println("Q1: Doc=" + doc + " score=" + score);
+				float score = scorer.Score();
+				Assert.IsTrue(score == 1.0f);
+				Assert.IsFalse(doc % 2 == 0);
+				base.Collect(doc);
+			}
+		}
+		
+		private class AnonymousClassCountingHitCollector3:CountingHitCollector
+		{
+			public AnonymousClassCountingHitCollector3(TestOmitTf enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestOmitTf enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestOmitTf enclosingInstance;
+			public TestOmitTf Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
+			{
+				float score = scorer.Score();
+				//System.out.println("Q1: Doc=" + doc + " score=" + score);
+				Assert.IsTrue(score == 1.0f);
+				Assert.IsTrue(doc % 2 == 0);
+				base.Collect(doc);
+			}
+		}
+		
+		private class AnonymousClassCountingHitCollector4:CountingHitCollector
+		{
+			public AnonymousClassCountingHitCollector4(TestOmitTf enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestOmitTf enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestOmitTf enclosingInstance;
+			public TestOmitTf Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public override void  Collect(int doc)
+			{
+				//System.out.println("BQ: Doc=" + doc + " score=" + score);
+				base.Collect(doc);
+			}
+		}
+		
+		[Serializable]
+		public class SimpleSimilarity:Similarity
+		{
+			public override float LengthNorm(System.String field, int numTerms)
+			{
+				return 1.0f;
+			}
+			public override float QueryNorm(float sumOfSquaredWeights)
+			{
+				return 1.0f;
+			}
+			
+			public override float Tf(float freq)
+			{
+				return freq;
+			}
+			
+			public override float SloppyFreq(int distance)
+			{
+				return 2.0f;
+			}
+			public override float Idf(System.Collections.ICollection terms, Searcher searcher)
+			{
+				return 1.0f;
+			}
+			public override float Idf(int docFreq, int numDocs)
+			{
+				return 1.0f;
+			}
+			public override float Coord(int overlap, int maxOverlap)
+			{
+				return 1.0f;
+			}
+		}
+		
+		
+		// Tests whether the DocumentWriter correctly enables the
+		// omitTermFreqAndPositions bit in the FieldInfo
+		[Test]
+		public virtual void  TestOmitTermFreqAndPositions()
+		{
+			Directory ram = new MockRAMDirectory();
+			Analyzer analyzer = new StandardAnalyzer();
+			IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			Document d = new Document();
+			
+			// this field will have Tf
+			Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
+			d.Add(f1);
+			
+			// this field will NOT have Tf
+			Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
+			f2.SetOmitTermFreqAndPositions(true);
+			d.Add(f2);
+			
+			writer.AddDocument(d);
+			writer.Optimize();
+			// now we add another document which has term freq for field f2 and not for f1 and verify that the SegmentMerger
+			// keeps things constant
+			d = new Document();
+			
+			// Reverse
+			f1.SetOmitTermFreqAndPositions(true);
+			d.Add(f1);
+			
+			f2.SetOmitTermFreqAndPositions(false);
+			d.Add(f2);
+			
+			writer.AddDocument(d);
+			// force merge
+			writer.Optimize();
+			// flush
+			writer.Close();
+			_TestUtil.CheckIndex(ram);
+			
+			SegmentReader reader = SegmentReader.GetOnlySegmentReader(ram);
+			FieldInfos fi = reader.FieldInfos();
+			Assert.IsTrue(fi.FieldInfo("f1").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should be set.");
+			Assert.IsTrue(fi.FieldInfo("f2").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should be set.");
+			
+			reader.Close();
+			ram.Close();
+		}
+		
+		// Tests whether merging of docs that have different
+		// omitTermFreqAndPositions for the same field works
+		[Test]
+		public virtual void  TestMixedMerge()
+		{
+			Directory ram = new MockRAMDirectory();
+			Analyzer analyzer = new StandardAnalyzer();
+			IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetMaxBufferedDocs(3);
+			writer.SetMergeFactor(2);
+			Document d = new Document();
+			
+			// this field will have Tf
+			Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
+			d.Add(f1);
+			
+			// this field will NOT have Tf
+			Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
+			f2.SetOmitTermFreqAndPositions(true);
+			d.Add(f2);
+			
+			for (int i = 0; i < 30; i++)
+				writer.AddDocument(d);
+			
+			// now we add another document which has term freq for field f2 and not for f1 and verify that the SegmentMerger
+			// keeps things constant
+			d = new Document();
+			
+			// Reverse
+			f1.SetOmitTermFreqAndPositions(true);
+			d.Add(f1);
+			
+			f2.SetOmitTermFreqAndPositions(false);
+			d.Add(f2);
+			
+			for (int i = 0; i < 30; i++)
+				writer.AddDocument(d);
+			
+			// force merge
+			writer.Optimize();
+			// flush
+			writer.Close();
+			
+			_TestUtil.CheckIndex(ram);
+			
+			SegmentReader reader = SegmentReader.GetOnlySegmentReader(ram);
+			FieldInfos fi = reader.FieldInfos();
+			Assert.IsTrue(fi.FieldInfo("f1").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should be set.");
+			Assert.IsTrue(fi.FieldInfo("f2").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should be set.");
+			
+			reader.Close();
+			ram.Close();
+		}
+		
+		// Make sure that first adding docs that do not omitTermFreqAndPositions for
+		// field X, then adding docs that do omitTermFreqAndPositions for that same
+		// field, works
+		[Test]
+		public virtual void  TestMixedRAM()
+		{
+			Directory ram = new MockRAMDirectory();
+			Analyzer analyzer = new StandardAnalyzer();
+			IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetMaxBufferedDocs(10);
+			writer.SetMergeFactor(2);
+			Document d = new Document();
+			
+			// this field will have Tf
+			Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
+			d.Add(f1);
+			
+			// this field will NOT have Tf
+			Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
+			d.Add(f2);
+			
+			for (int i = 0; i < 5; i++)
+				writer.AddDocument(d);
+			
+			f2.SetOmitTermFreqAndPositions(true);
+			
+			for (int i = 0; i < 20; i++)
+				writer.AddDocument(d);
+			
+			// force merge
+			writer.Optimize();
+			
+			// flush
+			writer.Close();
+			
+			_TestUtil.CheckIndex(ram);
+			
+			SegmentReader reader = SegmentReader.GetOnlySegmentReader(ram);
+			FieldInfos fi = reader.FieldInfos();
+			Assert.IsTrue(!fi.FieldInfo("f1").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should not be set.");
+			Assert.IsTrue(fi.FieldInfo("f2").omitTermFreqAndPositions_ForNUnit, "OmitTermFreqAndPositions field bit should be set.");
+			
+			reader.Close();
+			ram.Close();
+		}
+		
+		private void  AssertNoPrx(Directory dir)
+		{
+			System.String[] files = dir.ListAll();
+			for (int i = 0; i < files.Length; i++)
+				Assert.IsFalse(files[i].EndsWith(".prx"));
+		}
+		
+		// Verifies no *.prx exists when all fields omit term freq:
+		[Test]
+		public virtual void  TestNoPrxFile()
+		{
+			Directory ram = new MockRAMDirectory();
+			Analyzer analyzer = new StandardAnalyzer();
+			IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetMaxBufferedDocs(3);
+			writer.SetMergeFactor(2);
+			writer.SetUseCompoundFile(false);
+			Document d = new Document();
+			
+			Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
+			f1.SetOmitTermFreqAndPositions(true);
+			d.Add(f1);
+			
+			for (int i = 0; i < 30; i++)
+				writer.AddDocument(d);
+			
+			writer.Commit();
+			
+			AssertNoPrx(ram);
+			
+			// force merge
+			writer.Optimize();
+			// flush
+			writer.Close();
+			
+			AssertNoPrx(ram);
+			_TestUtil.CheckIndex(ram);
+			ram.Close();
+		}
+		
+		// Test scores with one field with Term Freqs and one without, otherwise with equal content 
+		[Test]
+		public virtual void  TestBasic()
+		{
+			Directory dir = new MockRAMDirectory();
+			Analyzer analyzer = new StandardAnalyzer();
+			IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetMergeFactor(2);
+			writer.SetMaxBufferedDocs(2);
+			writer.SetSimilarity(new SimpleSimilarity());
+			
+			
+			System.Text.StringBuilder sb = new System.Text.StringBuilder(265);
+			System.String term = "term";
+			for (int i = 0; i < 30; i++)
+			{
+				Document d = new Document();
+				sb.Append(term).Append(" ");
+				System.String content = sb.ToString();
+				Field noTf = new Field("noTf", content + (i % 2 == 0?"":" notf"), Field.Store.NO, Field.Index.ANALYZED);
+				noTf.SetOmitTermFreqAndPositions(true);
+				d.Add(noTf);
+				
+				Field tf = new Field("tf", content + (i % 2 == 0?" tf":""), Field.Store.NO, Field.Index.ANALYZED);
+				d.Add(tf);
+				
+				writer.AddDocument(d);
+				//System.out.println(d);
+			}
+			
+			writer.Optimize();
+			// flush
+			writer.Close();
+			_TestUtil.CheckIndex(dir);
+			
+			/*
+			* Verify the index
+			*/
+			Searcher searcher = new IndexSearcher(dir);
+			searcher.SetSimilarity(new SimpleSimilarity());
+			
+			Term a = new Term("noTf", term);
+			Term b = new Term("tf", term);
+			Term c = new Term("noTf", "notf");
+			Term d2 = new Term("tf", "tf");
+			TermQuery q1 = new TermQuery(a);
+			TermQuery q2 = new TermQuery(b);
+			TermQuery q3 = new TermQuery(c);
+			TermQuery q4 = new TermQuery(d2);
+			
+			
+			searcher.Search(q1, new AnonymousClassCountingHitCollector(this));
+			//System.out.println(CountingHitCollector.getCount());
+			
+			
+			searcher.Search(q2, new AnonymousClassCountingHitCollector1(this));
+			//System.out.println(CountingHitCollector.getCount());
+			
+			
+			
+			
+			
+			searcher.Search(q3, new AnonymousClassCountingHitCollector2(this));
+			//System.out.println(CountingHitCollector.getCount());
+			
+			
+			searcher.Search(q4, new AnonymousClassCountingHitCollector3(this));
+			//System.out.println(CountingHitCollector.getCount());
+			
+			
+			
+			BooleanQuery bq = new BooleanQuery();
+			bq.Add(q1, Occur.MUST);
+			bq.Add(q4, Occur.MUST);
+			
+			searcher.Search(bq, new AnonymousClassCountingHitCollector4(this));
+			Assert.IsTrue(15 == CountingHitCollector.GetCount());
+			
+			searcher.Close();
+			dir.Close();
+		}
+		
+		public class CountingHitCollector:Collector
+		{
+			internal static int count = 0;
+			internal static int sum = 0;
+			private int docBase = - 1;
+			internal CountingHitCollector()
+			{
+				count = 0; sum = 0;
+			}
+			public override void  SetScorer(Scorer scorer)
+			{
+			}
+			public override void  Collect(int doc)
+			{
+				count++;
+				sum += doc + docBase; // use it to avoid any possibility of being optimized away
+			}
+			
+			public static int GetCount()
+			{
+				return count;
+			}
+			public static int GetSum()
+			{
+				return sum;
+			}
+			
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				this.docBase = docBase;
+			}
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
+			}
+		}
+	}
+}
\ No newline at end of file
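
Note on the rewritten TestOmitTf above: its collectors show the 2.9 search API migration. HitCollector.Collect(int doc, float score) becomes the Collector contract, where scores are pulled from a Scorer and doc IDs are relative to the current segment. A skeleton of the contract (the class name is illustrative; the member signatures are exactly the ones CountingHitCollector overrides above):

    public class ExampleCollector : Collector
    {
        private Scorer scorer;
        private int docBase;
        public override void SetScorer(Scorer scorer) { this.scorer = scorer; }
        public override void SetNextReader(IndexReader reader, int docBase) { this.docBase = docBase; } // called once per segment
        public override void Collect(int doc)
        {
            float score = scorer.Score();  // pulled on demand instead of pushed
            int globalDoc = docBase + doc; // doc is segment-relative
        }
        public override bool AcceptsDocsOutOfOrder() { return true; } // scorers may deliver docs out of order
    }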

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestParallelReader.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelReader.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,17 +19,17 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using MapFieldSelector = Lucene.Net.Documents.MapFieldSelector;
 using Directory = Lucene.Net.Store.Directory;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using BooleanQuery = Lucene.Net.Search.BooleanQuery;
-using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using Searcher = Lucene.Net.Search.Searcher;
 using TermQuery = Lucene.Net.Search.TermQuery;
 using Occur = Lucene.Net.Search.BooleanClause.Occur;
@@ -37,15 +37,16 @@
 
 namespace Lucene.Net.Index
 {
-	[TestFixture]
-	public class TestParallelReader : LuceneTestCase
+	
+    [TestFixture]
+	public class TestParallelReader:LuceneTestCase
 	{
 		
 		private Searcher parallel;
 		private Searcher single;
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			single = Single();
@@ -78,12 +79,12 @@
 			ParallelReader pr = new ParallelReader();
 			pr.Add(IndexReader.Open(dir1));
 			pr.Add(IndexReader.Open(dir2));
-			System.Collections.Generic.ICollection<string> fieldNames = pr.GetFieldNames(IndexReader.FieldOption.ALL);
+			System.Collections.ICollection fieldNames = pr.GetFieldNames(IndexReader.FieldOption.ALL);
 			Assert.AreEqual(4, fieldNames.Count);
-			Assert.IsTrue(fieldNames.Contains("f1"));
-			Assert.IsTrue(fieldNames.Contains("f2"));
-			Assert.IsTrue(fieldNames.Contains("f3"));
-			Assert.IsTrue(fieldNames.Contains("f4"));
+			Assert.IsTrue(SupportClass.CollectionsHelper.Contains(fieldNames, "f1"));
+			Assert.IsTrue(SupportClass.CollectionsHelper.Contains(fieldNames, "f2"));
+			Assert.IsTrue(SupportClass.CollectionsHelper.Contains(fieldNames, "f3"));
+			Assert.IsTrue(SupportClass.CollectionsHelper.Contains(fieldNames, "f4"));
 		}
 		
 		[Test]
@@ -95,9 +96,9 @@
 			pr.Add(IndexReader.Open(dir1));
 			pr.Add(IndexReader.Open(dir2));
 			
-			Lucene.Net.Documents.Document doc11 = pr.Document(0, new MapFieldSelector(new System.String[]{"f1"}));
-			Lucene.Net.Documents.Document doc24 = pr.Document(1, new MapFieldSelector(new System.Collections.ArrayList(new System.String[]{"f4"})));
-			Lucene.Net.Documents.Document doc223 = pr.Document(1, new MapFieldSelector(new System.String[]{"f2", "f3"}));
+			Document doc11 = pr.Document(0, new MapFieldSelector(new System.String[]{"f1"}));
+			Document doc24 = pr.Document(1, new MapFieldSelector(new System.Collections.ArrayList(new System.String[]{"f4"})));
+			Document doc223 = pr.Document(1, new MapFieldSelector(new System.String[]{"f2", "f3"}));
 			
 			Assert.AreEqual(1, doc11.GetFields().Count);
 			Assert.AreEqual(1, doc24.GetFields().Count);
@@ -117,8 +118,8 @@
 			
 			// one document only:
 			Directory dir2 = new MockRAMDirectory();
-            IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
+			IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			Document d3 = new Document();
 			d3.Add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			w2.AddDocument(d3);
 			w2.Close();
@@ -130,7 +131,7 @@
 				pr.Add(IndexReader.Open(dir2));
 				Assert.Fail("didn't get exptected exception: indexes don't have same number of documents");
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expected exception
 			}
@@ -140,7 +141,7 @@
 		public virtual void  TestIsCurrent()
 		{
 			Directory dir1 = GetDir1();
-			Directory dir2 = GetDir1();
+			Directory dir2 = GetDir2();
 			ParallelReader pr = new ParallelReader();
 			pr.Add(IndexReader.Open(dir1));
 			pr.Add(IndexReader.Open(dir2));
@@ -166,16 +167,16 @@
 		public virtual void  TestIsOptimized()
 		{
 			Directory dir1 = GetDir1();
-			Directory dir2 = GetDir1();
+			Directory dir2 = GetDir2();
 			
 			// add another document to ensure that the indexes are not optimized
-            IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			Document d = new Document();
 			d.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			modifier.AddDocument(d);
 			modifier.Close();
-
-            modifier = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			
+			modifier = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			d = new Document();
 			d.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			modifier.AddDocument(d);
@@ -187,8 +188,8 @@
 			pr.Add(IndexReader.Open(dir2));
 			Assert.IsFalse(pr.IsOptimized());
 			pr.Close();
-
-            modifier = new IndexWriter(dir1, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			
+			modifier = new IndexWriter(dir1, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			modifier.Optimize();
 			modifier.Close();
 			
@@ -198,9 +199,9 @@
 			// just one of the two indexes is optimized
 			Assert.IsFalse(pr.IsOptimized());
 			pr.Close();
-
-
-            modifier = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			
+			
+			modifier = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			modifier.Optimize();
 			modifier.Close();
 			
@@ -212,17 +213,39 @@
 			pr.Close();
 		}
 		
+		[Test]
+		public virtual void  TestAllTermDocs()
+		{
+			Directory dir1 = GetDir1();
+			Directory dir2 = GetDir2();
+			ParallelReader pr = new ParallelReader();
+			pr.Add(IndexReader.Open(dir1));
+			pr.Add(IndexReader.Open(dir2));
+			int NUM_DOCS = 2;
+			TermDocs td = pr.TermDocs(null);
+			for (int i = 0; i < NUM_DOCS; i++)
+			{
+				Assert.IsTrue(td.Next());
+				Assert.AreEqual(i, td.Doc());
+				Assert.AreEqual(1, td.Freq());
+			}
+			td.Close();
+			pr.Close();
+			dir1.Close();
+			dir2.Close();
+		}
+		
 		
 		private void  QueryTest(Query query)
 		{
 			ScoreDoc[] parallelHits = parallel.Search(query, null, 1000).scoreDocs;
-            ScoreDoc[] singleHits = single.Search(query, null, 1000).scoreDocs;
+			ScoreDoc[] singleHits = single.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(parallelHits.Length, singleHits.Length);
 			for (int i = 0; i < parallelHits.Length; i++)
 			{
 				Assert.AreEqual(parallelHits[i].score, singleHits[i].score, 0.001f);
-				Lucene.Net.Documents.Document docParallel = parallel.Doc(parallelHits[i].doc);
-				Lucene.Net.Documents.Document docSingle = single.Doc(singleHits[i].doc);
+				Document docParallel = parallel.Doc(parallelHits[i].doc);
+				Document docSingle = single.Doc(singleHits[i].doc);
 				Assert.AreEqual(docParallel.Get("f1"), docSingle.Get("f1"));
 				Assert.AreEqual(docParallel.Get("f2"), docSingle.Get("f2"));
 				Assert.AreEqual(docParallel.Get("f3"), docSingle.Get("f3"));
@@ -235,13 +258,13 @@
 		{
 			Directory dir = new MockRAMDirectory();
 			IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+			Document d1 = new Document();
 			d1.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			d1.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			d1.Add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			d1.Add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			w.AddDocument(d1);
-			Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+			Document d2 = new Document();
 			d2.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			d2.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			d2.Add(new Field("f3", "v2", Field.Store.YES, Field.Index.ANALYZED));
@@ -266,12 +289,12 @@
 		private Directory GetDir1()
 		{
 			Directory dir1 = new MockRAMDirectory();
-            IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+			IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			Document d1 = new Document();
 			d1.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			d1.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			w1.AddDocument(d1);
-			Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+			Document d2 = new Document();
 			d2.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			d2.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			w1.AddDocument(d2);
@@ -282,12 +305,12 @@
 		private Directory GetDir2()
 		{
 			Directory dir2 = new RAMDirectory();
-            IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
+			IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			Document d3 = new Document();
 			d3.Add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			d3.Add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			w2.AddDocument(d3);
-			Lucene.Net.Documents.Document d4 = new Lucene.Net.Documents.Document();
+			Document d4 = new Document();
 			d4.Add(new Field("f3", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			d4.Add(new Field("f4", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			w2.AddDocument(d4);

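The TestParallelReader changes above all exercise the same pattern: a ParallelReader layered over two sub-indexes that hold the same documents in the same order, each contributing a disjoint set of fields (f1/f2 in dir1, f3/f4 in dir2). A minimal sketch of that pattern, assuming the Lucene.Net 2.9-era API used in this commit (dir1, dir2 and the query are illustrative placeholders, not part of the diff):

    ParallelReader pr = new ParallelReader();
    pr.Add(IndexReader.Open(dir1)); // sub-index carrying fields f1, f2
    pr.Add(IndexReader.Open(dir2)); // sub-index carrying fields f3, f4; same doc count and order required

    Searcher searcher = new IndexSearcher(pr);
    ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("f1", "v1")), null, 10).scoreDocs;

    // A document read through the ParallelReader exposes the fields of both sub-indexes.
    Document hit = searcher.Doc(hits[0].doc);
    System.Console.WriteLine(hit.Get("f1") + " / " + hit.Get("f3"));

    searcher.Close();
    pr.Close();

If the sub-indexes disagree on document count, ParallelReader.Add() throws a System.ArgumentException, which is what the incompatible-indexes test above catches deliberately.
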
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelTermEnum.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestParallelTermEnum.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelTermEnum.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelTermEnum.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,47 +19,46 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Index = Lucene.Net.Documents.Field.Index;
 using Store = Lucene.Net.Documents.Field.Store;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestParallelTermEnum : LuceneTestCase
+    [TestFixture]
+	public class TestParallelTermEnum:LuceneTestCase
 	{
 		private IndexReader ir1;
 		private IndexReader ir2;
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
-			base.SetUp();
-			Lucene.Net.Documents.Document doc;
+			Document doc;
 			
 			RAMDirectory rd1 = new RAMDirectory();
-            IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("field1", "the quick brown fox jumps", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
-			doc.Add(new Field("field2", "the quick brown fox jumps", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
-			doc.Add(new Field("field4", "", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED));
+			doc = new Document();
+			doc.Add(new Field("field1", "the quick brown fox jumps", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("field2", "the quick brown fox jumps", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("field4", "", Field.Store.NO, Field.Index.ANALYZED));
 			iw1.AddDocument(doc);
 			
 			iw1.Close();
 			RAMDirectory rd2 = new RAMDirectory();
-            IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("field0", "", Lucene.Net.Documents.Field.Store.NO, Lucene.Net.Documents.Field.Index.ANALYZED));
-			doc.Add(new Field("field1", "the fox jumps over the lazy dog", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
-			doc.Add(new Field("field3", "the fox jumps over the lazy dog", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
+			doc = new Document();
+			doc.Add(new Field("field0", "", Field.Store.NO, Field.Index.ANALYZED));
+			doc.Add(new Field("field1", "the fox jumps over the lazy dog", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("field3", "the fox jumps over the lazy dog", Field.Store.YES, Field.Index.ANALYZED));
 			iw2.AddDocument(doc);
 			
 			iw2.Close();
@@ -69,10 +68,9 @@
 		}
 		
 		[TearDown]
-		public override void TearDown()
+		public override void  TearDown()
 		{
 			base.TearDown();
-			base.TearDown();
 			
 			ir1.Close();
 			ir2.Close();

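The TestPayloads diff below is dominated by one API migration: token streams no longer override Token Next(Token reusableToken) but instead register the attributes they populate and override bool IncrementToken(), as the rewritten PayloadFilter and PoolingPayloadTokenStream show. A minimal single-token stream in the new style, assuming the 2.9 attribute API used in the diff (OneTokenStream is a hypothetical name, not part of this commit):

    private class OneTokenStream : TokenStream
    {
        private System.String term;
        private byte[] payloadData;
        private bool exhausted = false;
        private TermAttribute termAtt;
        private PayloadAttribute payloadAtt;

        internal OneTokenStream(System.String term, byte[] payloadData)
        {
            this.term = term;
            this.payloadData = payloadData;
            // declare up front which attributes this stream populates
            termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
            payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
        }

        public override bool IncrementToken()
        {
            if (exhausted)
                return false; // no more tokens; consumers stop iterating here
            exhausted = true;
            termAtt.SetTermBuffer(term);                     // term text for this position
            payloadAtt.SetPayload(new Payload(payloadData)); // payload stored at this position
            return true;
        }
    }

Consumers drive the stream by calling IncrementToken() in a loop and reading the shared attribute instances after each call that returns true, rather than receiving a fresh Token per call.
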
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestPayloads.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestPayloads.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestPayloads.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestPayloads.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,28 +19,30 @@
 
 using NUnit.Framework;
 
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
-using Directory = Lucene.Net.Store.Directory;
-using FSDirectory = Lucene.Net.Store.FSDirectory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
-using Token = Lucene.Net.Analysis.Token;
 using TokenFilter = Lucene.Net.Analysis.TokenFilter;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using WhitespaceTokenizer = Lucene.Net.Analysis.WhitespaceTokenizer;
+using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
 	
 	
-	[TestFixture]
-	public class TestPayloads : LuceneTestCase
+    [TestFixture]
+	public class TestPayloads:LuceneTestCase
 	{
-		private class AnonymousClassThread : SupportClass.ThreadClass
+		private class AnonymousClassThread:SupportClass.ThreadClass
 		{
 			public AnonymousClassThread(int numDocs, System.String field, Lucene.Net.Index.TestPayloads.ByteArrayPool pool, Lucene.Net.Index.IndexWriter writer, TestPayloads enclosingInstance)
 			{
@@ -74,7 +76,7 @@
 					for (int j = 0; j < numDocs; j++)
 					{
 						Document d = new Document();
-						d.Add(new Field(field, new PoolingPayloadTokenStream(pool)));
+						d.Add(new Field(field, new PoolingPayloadTokenStream(enclosingInstance, pool)));
 						writer.AddDocument(d);
 					}
 				}
@@ -90,7 +92,8 @@
 		[Test]
 		public virtual void  TestPayload()
 		{
-			byte[] testData = System.Text.Encoding.UTF8.GetBytes("This is a test!");
+			rnd = NewRandom();
+			byte[] testData = System.Text.UTF8Encoding.UTF8.GetBytes("This is a test!");
 			Payload payload = new Payload(testData);
 			Assert.AreEqual(testData.Length, payload.Length(), "Wrong payload length.");
 			
@@ -101,7 +104,7 @@
 				payload.CopyTo(target, 0);
 				Assert.Fail("Expected exception not thrown");
 			}
-			catch (System.Exception)
+			catch (System.Exception expected)
 			{
 				// expected exception
 			}
@@ -130,7 +133,7 @@
 				payload.ByteAt(testData.Length + 1);
 				Assert.Fail("Expected exception not thrown");
 			}
-			catch (System.Exception)
+			catch (System.Exception expected)
 			{
 				// expected exception
 			}
@@ -148,9 +151,10 @@
 		[Test]
 		public virtual void  TestPayloadFieldBit()
 		{
+			rnd = NewRandom();
 			Directory ram = new RAMDirectory();
 			PayloadAnalyzer analyzer = new PayloadAnalyzer();
-            IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			Document d = new Document();
 			// this field won't have any payloads
 			d.Add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.ANALYZED));
@@ -163,42 +167,40 @@
 			// enabled in only some documents
 			d.Add(new Field("f3", "This field has payloads in some docs", Field.Store.NO, Field.Index.ANALYZED));
 			// only add payload data for field f2
-			analyzer.SetPayloadData("f2", 1, System.Text.Encoding.UTF8.GetBytes("somedata"), 0, 1);
+			analyzer.SetPayloadData("f2", 1, System.Text.UTF8Encoding.UTF8.GetBytes("somedata"), 0, 1);
 			writer.AddDocument(d);
 			// flush
 			writer.Close();
 			
-			// only one segment in the index, so we can cast to SegmentReader
-			SegmentReader reader = (SegmentReader) IndexReader.Open(ram);
+			SegmentReader reader = SegmentReader.GetOnlySegmentReader(ram);
 			FieldInfos fi = reader.FieldInfos();
-			Assert.IsFalse(fi.FieldInfo("f1").StorePayloads_ForNUnitTest, "Payload field bit should not be set.");
-			Assert.IsTrue(fi.FieldInfo("f2").StorePayloads_ForNUnitTest, "Payload field bit should be set.");
-			Assert.IsFalse(fi.FieldInfo("f3").StorePayloads_ForNUnitTest, "Payload field bit should not be set.");
+			Assert.IsFalse(fi.FieldInfo("f1").storePayloads_ForNUnit, "Payload field bit should not be set.");
+			Assert.IsTrue(fi.FieldInfo("f2").storePayloads_ForNUnit, "Payload field bit should be set.");
+			Assert.IsFalse(fi.FieldInfo("f3").storePayloads_ForNUnit, "Payload field bit should not be set.");
 			reader.Close();
 			
 			// now we add another document which has payloads for field f3 and verify that the SegmentMerger
 			// enabled payloads for that field
-            writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			d = new Document();
 			d.Add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.ANALYZED));
 			d.Add(new Field("f2", "This field has payloads in all docs", Field.Store.NO, Field.Index.ANALYZED));
 			d.Add(new Field("f2", "This field has payloads in all docs", Field.Store.NO, Field.Index.ANALYZED));
 			d.Add(new Field("f3", "This field has payloads in some docs", Field.Store.NO, Field.Index.ANALYZED));
 			// add payload data for field f2 and f3
-			analyzer.SetPayloadData("f2", System.Text.Encoding.UTF8.GetBytes("somedata"), 0, 1);
-			analyzer.SetPayloadData("f3", System.Text.Encoding.UTF8.GetBytes("somedata"), 0, 3);
+			analyzer.SetPayloadData("f2", System.Text.UTF8Encoding.UTF8.GetBytes("somedata"), 0, 1);
+			analyzer.SetPayloadData("f3", System.Text.UTF8Encoding.UTF8.GetBytes("somedata"), 0, 3);
 			writer.AddDocument(d);
 			// force merge
 			writer.Optimize();
 			// flush
 			writer.Close();
 			
-			// only one segment in the index, so we can cast to SegmentReader
-			reader = (SegmentReader) IndexReader.Open(ram);
+			reader = SegmentReader.GetOnlySegmentReader(ram);
 			fi = reader.FieldInfos();
-			Assert.IsFalse(fi.FieldInfo("f1").StorePayloads_ForNUnitTest, "Payload field bit should not be set.");
-			Assert.IsTrue(fi.FieldInfo("f2").StorePayloads_ForNUnitTest, "Payload field bit should be set.");
-			Assert.IsTrue(fi.FieldInfo("f3").StorePayloads_ForNUnitTest, "Payload field bit should be set.");
+			Assert.IsFalse(fi.FieldInfo("f1").storePayloads_ForNUnit, "Payload field bit should not be set.");
+			Assert.IsTrue(fi.FieldInfo("f2").storePayloads_ForNUnit, "Payload field bit should be set.");
+			Assert.IsTrue(fi.FieldInfo("f3").storePayloads_ForNUnit, "Payload field bit should be set.");
 			reader.Close();
 		}
 		
@@ -206,15 +208,16 @@
 		[Test]
 		public virtual void  TestPayloadsEncoding()
 		{
+			rnd = NewRandom();
 			// first perform the test using a RAMDirectory
 			Directory dir = new RAMDirectory();
 			PerformTest(dir);
 			
 			// now use a FSDirectory and repeat same test
-			System.String dirName = "test_payloads";
-			dir = FSDirectory.GetDirectory(dirName);
+			System.IO.FileInfo dirName = _TestUtil.GetTempDir("test_payloads");
+			dir = FSDirectory.Open(dirName);
 			PerformTest(dir);
-			RmDir(dirName);
+			_TestUtil.RmDir(dirName);
 		}
 		
 		// builds an index with payloads in the given Directory and performs
@@ -222,7 +225,7 @@
 		private void  PerformTest(Directory dir)
 		{
 			PayloadAnalyzer analyzer = new PayloadAnalyzer();
-            IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			// should be in sync with value in TermInfosWriter
 			int skipInterval = 16;
@@ -236,7 +239,7 @@
 			System.Text.StringBuilder sb = new System.Text.StringBuilder();
 			for (int i = 0; i < terms.Length; i++)
 			{
-				sb.Append(terms[i].text_ForNUnitTest);
+				sb.Append(terms[i].text_ForNUnit);
 				sb.Append(" ");
 			}
 			System.String content = sb.ToString();
@@ -362,7 +365,7 @@
 				tp.GetPayload(null, 0);
 				Assert.Fail("Expected exception not thrown");
 			}
-			catch (System.Exception)
+			catch (System.Exception expected)
 			{
 				// expected exception
 			}
@@ -371,7 +374,7 @@
 			
 			// test long payload
 			analyzer = new PayloadAnalyzer();
-            writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			System.String singleTerm = "lucene";
 			
 			d = new Document();
@@ -400,14 +403,14 @@
 			reader.Close();
 		}
 		
-		private static System.Random rnd = new System.Random();
+		private System.Random rnd;
 		
-		private static void  GenerateRandomData(byte[] data)
+		private void  GenerateRandomData(byte[] data)
 		{
 			rnd.NextBytes(data);
 		}
 		
-		private static byte[] GenerateRandomData(int n)
+		private byte[] GenerateRandomData(int n)
 		{
 			byte[] data = new byte[n];
 			GenerateRandomData(data);
@@ -435,56 +438,6 @@
 		}
 		
 		
-		private void  RmDir(System.String dir)
-		{
-			System.IO.FileInfo fileDir = new System.IO.FileInfo(dir);
-			bool tmpBool;
-			if (System.IO.File.Exists(fileDir.FullName))
-				tmpBool = true;
-			else
-				tmpBool = System.IO.Directory.Exists(fileDir.FullName);
-			if (tmpBool)
-			{
-				System.IO.FileInfo[] files = SupportClass.FileSupport.GetFiles(fileDir);
-				if (files != null)
-				{
-					for (int i = 0; i < files.Length; i++)
-					{
-						bool tmpBool2;
-						if (System.IO.File.Exists(files[i].FullName))
-						{
-							System.IO.File.Delete(files[i].FullName);
-							tmpBool2 = true;
-						}
-						else if (System.IO.Directory.Exists(files[i].FullName))
-						{
-							System.IO.Directory.Delete(files[i].FullName);
-							tmpBool2 = true;
-						}
-						else
-							tmpBool2 = false;
-						bool generatedAux = tmpBool2;
-					}
-				}
-				bool tmpBool3;
-				if (System.IO.File.Exists(fileDir.FullName))
-				{
-					System.IO.File.Delete(fileDir.FullName);
-					tmpBool3 = true;
-				}
-				else if (System.IO.Directory.Exists(fileDir.FullName))
-				{
-					System.IO.Directory.Delete(fileDir.FullName);
-					tmpBool3 = true;
-				}
-				else
-					tmpBool3 = false;
-				bool generatedAux2 = tmpBool3;
-			}
-		}
-		
-		
-		
 		internal virtual void  AssertByteArrayEquals(byte[] b1, byte[] b2)
 		{
 			if (b1.Length != b2.Length)
@@ -503,7 +456,7 @@
 		
 		
 		/// <summary> This Analyzer uses an WhitespaceTokenizer and PayloadFilter.</summary>
-		private class PayloadAnalyzer : Analyzer
+		private class PayloadAnalyzer:Analyzer
 		{
 			internal System.Collections.IDictionary fieldToData = new System.Collections.Hashtable();
 			
@@ -554,25 +507,26 @@
 		
 		
 		/// <summary> This Filter adds payloads to the tokens.</summary>
-		private class PayloadFilter : TokenFilter
+		private class PayloadFilter:TokenFilter
 		{
 			private byte[] data;
 			private int length;
 			private int offset;
 			internal Payload payload = new Payload();
+			internal PayloadAttribute payloadAtt;
 			
 			public PayloadFilter(TokenStream in_Renamed, byte[] data, int offset, int length):base(in_Renamed)
 			{
 				this.data = data;
 				this.length = length;
 				this.offset = offset;
+				payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
 			}
 			
-			public override Token Next(Token reusableToken)
+			public override bool IncrementToken()
 			{
-                System.Diagnostics.Debug.Assert(reusableToken != null);
-				Token nextToken = input.Next(reusableToken);
-				if (nextToken != null)
+				bool hasNext = input.IncrementToken();
+				if (hasNext)
 				{
 					if (offset + length <= data.Length)
 					{
@@ -580,30 +534,31 @@
 						if (p == null)
 						{
 							p = new Payload();
-							nextToken.SetPayload(p);
+							payloadAtt.SetPayload(p);
 						}
 						p.SetData(data, offset, length);
 						offset += length;
 					}
 					else
 					{
-						nextToken.SetPayload(null);
+						payloadAtt.SetPayload(null);
 					}
 				}
 				
-				return nextToken;
+				return hasNext;
 			}
 		}
 		
 		[Test]
 		public virtual void  TestThreadSafety()
 		{
+			rnd = NewRandom();
 			int numThreads = 5;
 			int numDocs = 50;
 			ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
 			
 			Directory dir = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			System.String field = "test";
 			
 			SupportClass.ThreadClass[] ingesters = new SupportClass.ThreadClass[numThreads];
@@ -615,13 +570,7 @@
 			
 			for (int i = 0; i < numThreads; i++)
 			{
-				try
-				{
-					ingesters[i].Join();
-				}
-				catch (System.Threading.ThreadInterruptedException)
-				{
-				}
+				ingesters[i].Join();
 			}
 			writer.Close();
 			IndexReader reader = IndexReader.Open(dir);
@@ -635,9 +584,7 @@
 					for (int i = 0; i < freq; i++)
 					{
 						tp.NextPosition();
-                        //System.String s = System.Text.Encoding.UTF8.GetString(tp.GetPayload(new byte[5], 0));
-                        //Assert.AreEqual(s, terms.Term().text_ForNUnitTest);
-                        Assert.AreEqual(pool.BytesToString(tp.GetPayload(new byte[5], 0)), terms.Term().text_ForNUnitTest);
+						Assert.AreEqual(pool.BytesToString(tp.GetPayload(new byte[5], 0)), terms.Term().text_ForNUnit);
 					}
 				}
 				tp.Close();
@@ -648,28 +595,49 @@
 			Assert.AreEqual(pool.Size(), numThreads);
 		}
 		
-		private class PoolingPayloadTokenStream : TokenStream
+		private class PoolingPayloadTokenStream:TokenStream
 		{
+			private void  InitBlock(TestPayloads enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestPayloads enclosingInstance;
+			public TestPayloads Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
 			private byte[] payload;
 			private bool first;
 			private ByteArrayPool pool;
-            private string term;
-			internal PoolingPayloadTokenStream(ByteArrayPool pool)
+			private System.String term;
+			
+			internal TermAttribute termAtt;
+			internal PayloadAttribute payloadAtt;
+			
+			internal PoolingPayloadTokenStream(TestPayloads enclosingInstance, ByteArrayPool pool)
 			{
+				InitBlock(enclosingInstance);
 				this.pool = pool;
 				payload = pool.Get();
-				Lucene.Net.Index.TestPayloads.GenerateRandomData(payload);
-                term = pool.BytesToString(payload);
+				Enclosing_Instance.GenerateRandomData(payload);
+				term = pool.BytesToString(payload);
 				first = true;
+				payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
+				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
 			}
 			
-			public override Token Next(Token reusableToken)
+			public override bool IncrementToken()
 			{
 				if (!first)
-					return null;
-				reusableToken.Reinit(term, 0, 0);
-				reusableToken.SetPayload(new Payload(payload));
-				return reusableToken;
+					return false;
+				first = false;
+				termAtt.SetTermBuffer(term);
+				payloadAtt.SetPayload(new Payload(payload));
+				return true;
 			}
 			
 			public override void  Close()
@@ -690,25 +658,28 @@
 					pool.Add(new byte[size]);
 				}
 			}
-
-            private UnicodeUtil.UTF8Result utf8Result = new UnicodeUtil.UTF8Result();
-
-            internal string BytesToString(byte[] bytes)
-            {
-                lock (this)
-                {
-                    string s = System.Text.Encoding.Default.GetString(bytes);
-                    UnicodeUtil.UTF16toUTF8(s, 0, s.Length, utf8Result);
-                    try{
-                        return System.Text.Encoding.UTF8.GetString(utf8Result.result, 0, utf8Result.length);
-                    }
-                    catch (System.Text.DecoderFallbackException)
-                    {
-                        return null;
-                    }
-                }
-            }
-
+			
+			private UnicodeUtil.UTF8Result utf8Result = new UnicodeUtil.UTF8Result();
+			
+			internal virtual System.String BytesToString(byte[] bytes)
+			{
+				lock (this)
+				{
+					System.String s = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(bytes));
+					UnicodeUtil.UTF16toUTF8(s, 0, s.Length, utf8Result);
+					try
+					{
+						System.String tempStr;
+						tempStr = System.Text.Encoding.GetEncoding("UTF-8").GetString(utf8Result.result);
+						return new System.String(tempStr.ToCharArray(), 0, utf8Result.length);
+					}
+					catch (System.IO.IOException uee)
+					{
+						return null;
+					}
+				}
+			}
+			
 			internal virtual byte[] Get()
 			{
 				lock (this)