Posted to commits@lucenenet.apache.org by ar...@apache.org on 2008/07/15 23:44:10 UTC

svn commit: r677059 [4/19] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/ ...

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestBackwardsCompatibility.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs Tue Jul 15 14:44:04 2008
@@ -19,24 +19,25 @@
 
 using NUnit.Framework;
 
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using TermQuery = Lucene.Net.Search.TermQuery;
-using Hits = Lucene.Net.Search.Hits;
-using Directory = Lucene.Net.Store.Directory;
-using FSDirectory = Lucene.Net.Store.FSDirectory;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
 	
 	/*
-	Verify we can read the pre-XXX file format, do searches
+	Verify we can read the pre-2.1 file format, do searches
 	against it, and add documents to it.*/
 	
-    [TestFixture]
-	public class TestBackwardsCompatibility
+	[TestFixture]
+	public class TestBackwardsCompatibility : LuceneTestCase
 	{
 		
 		// Uncomment these cases & run in a pre-lockless checkout
@@ -54,20 +55,24 @@
 		
 		/* Unzips dirName + ".zip" --> dirName, removing dirName
 		first */
-		public virtual void  Unzip(System.String dirName)
+		public virtual void  Unzip(System.String zipName, System.String destDirName)
 		{
 #if SHARP_ZIP_LIB
-			RmDir(dirName);
-
+			// get zip input stream
 			ICSharpCode.SharpZipLib.Zip.ZipInputStream zipFile;
-            zipFile = new ICSharpCode.SharpZipLib.Zip.ZipInputStream(System.IO.File.OpenRead(dirName + ".zip"));
+			zipFile = new ICSharpCode.SharpZipLib.Zip.ZipInputStream(System.IO.File.OpenRead(zipName + ".zip"));
 
+			// get dest directory name
+			System.String dirName = FullDir(destDirName);
 			System.IO.FileInfo fileDir = new System.IO.FileInfo(dirName);
-			System.IO.Directory.CreateDirectory(fileDir.FullName);
 
-            ICSharpCode.SharpZipLib.Zip.ZipEntry entry;
+			// clean up old directory (if there) and create new directory
+			RmDir(fileDir.FullName);
+			System.IO.Directory.CreateDirectory(fileDir.FullName);
 
-            while ((entry = zipFile.GetNextEntry()) != null)
+			// copy file entries from zip stream to directory
+			ICSharpCode.SharpZipLib.Zip.ZipEntry entry;
+			while ((entry = zipFile.GetNextEntry()) != null)
 			{
 				System.IO.Stream streamout = new System.IO.BufferedStream(new System.IO.FileStream(new System.IO.FileInfo(System.IO.Path.Combine(fileDir.FullName, entry.Name)).FullName, System.IO.FileMode.Create));
 				
@@ -78,16 +83,16 @@
 					streamout.Write(buffer, 0, len);
 				}
 				
-                streamout.Close();
+				streamout.Close();
 			}
 			
 			zipFile.Close();
 #else
-            Assert.Fail("Needs integration with SharpZipLib");
+			Assert.Fail("Needs integration with SharpZipLib");
 #endif
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestCreateCFS()
 		{
 			System.String dirName = "testindex.cfs";
@@ -95,7 +100,7 @@
 			RmDir(dirName);
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestCreateNoCFS()
 		{
 			System.String dirName = "testindex.nocfs";
@@ -103,42 +108,49 @@
 			RmDir(dirName);
 		}
 		
-        [Test]
+		internal System.String[] oldNames = new System.String[]{"prelockless.cfs", "prelockless.nocfs", "presharedstores.cfs", "presharedstores.nocfs"};
+		
+		[Test]
 		public virtual void  TestSearchOldIndex()
 		{
-			System.String[] oldNames = new System.String[]{"prelockless.cfs", "prelockless.nocfs"};
 			for (int i = 0; i < oldNames.Length; i++)
 			{
-				System.String dirName = @"C#\src\test\index\index." + oldNames[i];
-				Unzip(dirName);
-				SearchIndex(dirName);
-				RmDir(dirName);
+				System.String dirName = @"Index\index." + oldNames[i];
+				Unzip(dirName, oldNames[i]);
+				SearchIndex(oldNames[i]);
+				RmDir(oldNames[i]);
 			}
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestIndexOldIndexNoAdds()
 		{
-			System.String[] oldNames = new System.String[]{"prelockless.cfs", "prelockless.nocfs"};
 			for (int i = 0; i < oldNames.Length; i++)
 			{
-				System.String dirName = @"C#\src\test\index\index." + oldNames[i];
-				Unzip(dirName);
-				ChangeIndexNoAdds(dirName);
-				RmDir(dirName);
+				System.String dirName = @"Index\index." + oldNames[i];
+				Unzip(dirName, oldNames[i]);
+				ChangeIndexNoAdds(oldNames[i], true);
+				RmDir(oldNames[i]);
+				
+				Unzip(dirName, oldNames[i]);
+				ChangeIndexNoAdds(oldNames[i], false);
+				RmDir(oldNames[i]);
 			}
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestIndexOldIndex()
 		{
-			System.String[] oldNames = new System.String[]{"prelockless.cfs", "prelockless.nocfs"};
 			for (int i = 0; i < oldNames.Length; i++)
 			{
-				System.String dirName = @"C#\src\test\index\index." + oldNames[i];
-				Unzip(dirName);
-				ChangeIndexWithAdds(dirName);
-				RmDir(dirName);
+				System.String dirName = @"Index\index." + oldNames[i];
+				Unzip(dirName, oldNames[i]);
+				ChangeIndexWithAdds(oldNames[i], true);
+				RmDir(oldNames[i]);
+				
+				Unzip(dirName, oldNames[i]);
+				ChangeIndexWithAdds(oldNames[i], false);
+				RmDir(oldNames[i]);
 			}
 		}
 		
@@ -147,12 +159,14 @@
 			//QueryParser parser = new QueryParser("contents", new WhitespaceAnalyzer());
 			//Query query = parser.parse("handle:1");
 			
+			dirName = FullDir(dirName);
+			
 			Directory dir = FSDirectory.GetDirectory(dirName);
 			IndexSearcher searcher = new IndexSearcher(dir);
 			
 			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
 			Assert.AreEqual(34, hits.Length());
-			Lucene.Net.Documents.Document d = hits.Doc(0);
+			Document d = hits.Doc(0);
 			
 			// First document should be #21 since its norm was increased:
 			Assert.AreEqual("21", d.Get("id"), "didn't get the right document first");
@@ -163,12 +177,15 @@
 		
 		/* Open pre-lockless index, add docs, do a delete &
 		* setNorm, and search */
-		public virtual void  ChangeIndexWithAdds(System.String dirName)
+		public virtual void  ChangeIndexWithAdds(System.String dirName, bool autoCommit)
 		{
 			
+			dirName = FullDir(dirName);
+			
 			Directory dir = FSDirectory.GetDirectory(dirName);
+			
 			// open writer
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
 			
 			// add 10 docs
 			for (int i = 0; i < 10; i++)
@@ -184,11 +201,11 @@
 			IndexSearcher searcher = new IndexSearcher(dir);
 			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
 			Assert.AreEqual(44, hits.Length(), "wrong number of hits");
-			Lucene.Net.Documents.Document d = hits.Doc(0);
+			Document d = hits.Doc(0);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
 			searcher.Close();
 			
-			// make sure we can do another delete & another setNorm against this
+			// make sure we can do delete & setNorm against this
 			// pre-lockless segment:
 			IndexReader reader = IndexReader.Open(dir);
 			Term searchTerm = new Term("id", "6");
@@ -197,7 +214,7 @@
 			reader.SetNorm(22, "content", (float) 2.0);
 			reader.Close();
 			
-			// make sure 2nd delete & 2nd norm "took":
+			// make sure they "took":
 			searcher = new IndexSearcher(dir);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
 			Assert.AreEqual(43, hits.Length(), "wrong number of hits");
@@ -206,7 +223,7 @@
 			searcher.Close();
 			
 			// optimize
-			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
 			writer.Optimize();
 			writer.Close();
 			
@@ -222,20 +239,22 @@
 		
 		/* Open pre-lockless index, add docs, do a delete &
 		* setNorm, and search */
-		public virtual void  ChangeIndexNoAdds(System.String dirName)
+		public virtual void ChangeIndexNoAdds(System.String dirName, bool autoCommit)
 		{
 			
+			dirName = FullDir(dirName);
+			
 			Directory dir = FSDirectory.GetDirectory(dirName);
 			
 			// make sure searching sees right # hits
 			IndexSearcher searcher = new IndexSearcher(dir);
 			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
 			Assert.AreEqual(34, hits.Length(), "wrong number of hits");
-			Lucene.Net.Documents.Document d = hits.Doc(0);
+			Document d = hits.Doc(0);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
 			searcher.Close();
 			
-			// make sure we can do another delete & another setNorm against this
+			// make sure we can do a delete & setNorm against this
 			// pre-lockless segment:
 			IndexReader reader = IndexReader.Open(dir);
 			Term searchTerm = new Term("id", "6");
@@ -244,7 +263,7 @@
 			reader.SetNorm(22, "content", (float) 2.0);
 			reader.Close();
 			
-			// make sure 2nd delete & 2nd norm "took":
+			// make sure they "took":
 			searcher = new IndexSearcher(dir);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
 			Assert.AreEqual(33, hits.Length(), "wrong number of hits");
@@ -253,7 +272,7 @@
 			searcher.Close();
 			
 			// optimize
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
 			writer.Optimize();
 			writer.Close();
 			
@@ -270,6 +289,10 @@
 		public virtual void  CreateIndex(System.String dirName, bool doCFS)
 		{
 			
+			RmDir(dirName);
+			
+			dirName = FullDir(dirName);
+			
 			Directory dir = FSDirectory.GetDirectory(dirName);
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 			writer.SetUseCompoundFile(doCFS);
@@ -293,66 +316,85 @@
 		}
 		
 		/* Verifies that the expected file names were produced */
-		
-		// disable until hardcoded file names are fixes:
-        [Test]
+
+		[Test]
 		public virtual void  TestExactFileNames()
 		{
 			
-			System.String outputDir = "lucene.backwardscompat0.index";
-			Directory dir = FSDirectory.GetDirectory(outputDir);
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-			for (int i = 0; i < 35; i++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				AddDoc(writer, i);
-			}
-			Assert.AreEqual(35, writer.DocCount(), "wrong doc count");
-			writer.Close();
-			
-			// Delete one doc so we get a .del file:
-			IndexReader reader = IndexReader.Open(dir);
-			Term searchTerm = new Term("id", "7");
-			int delCount = reader.DeleteDocuments(searchTerm);
-			Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
-			
-			// Set one norm so we get a .s0 file:
-			reader.SetNorm(21, "content", (float) 1.5);
-			reader.Close();
-			
-			// The numbering of fields can vary depending on which
-			// JRE is in use.  On some JREs we see content bound to
-			// field 0; on others, field 1.  So, here we have to
-			// figure out which field number corresponds to
-			// "content", and then set our expected file names below
-			// accordingly:
-			CompoundFileReader cfsReader = new CompoundFileReader(dir, "_2.cfs");
-			FieldInfos fieldInfos = new FieldInfos(cfsReader, "_2.fnm");
-			int contentFieldIndex = - 1;
-			for (int i = 0; i < fieldInfos.Size(); i++)
-			{
-				FieldInfo fi = fieldInfos.FieldInfo(i);
-				if (fi.Name.Equals("content"))
+				
+				System.String outputDir = "lucene.backwardscompat0.index";
+				RmDir(outputDir);
+				
+				try
 				{
-					contentFieldIndex = i;
-					break;
+					Directory dir = FSDirectory.GetDirectory(FullDir(outputDir));
+					
+					bool autoCommit = 0 == pass;
+					
+					IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+					writer.SetRAMBufferSizeMB(16.0);
+					//IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+					for (int i = 0; i < 35; i++)
+					{
+						AddDoc(writer, i);
+					}
+					Assert.AreEqual(35, writer.DocCount());
+					writer.Close();
+					
+					// Delete one doc so we get a .del file:
+					IndexReader reader = IndexReader.Open(dir);
+					Term searchTerm = new Term("id", "7");
+					int delCount = reader.DeleteDocuments(searchTerm);
+					Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
+					
+					// Set one norm so we get a .s0 file:
+					reader.SetNorm(21, "content", (float) 1.5);
+					reader.Close();
+					
+					// The numbering of fields can vary depending on which
+					// JRE is in use.  On some JREs we see content bound to
+					// field 0; on others, field 1.  So, here we have to
+					// figure out which field number corresponds to
+					// "content", and then set our expected file names below
+					// accordingly:
+					CompoundFileReader cfsReader = new CompoundFileReader(dir, "_0.cfs");
+					FieldInfos fieldInfos = new FieldInfos(cfsReader, "_0.fnm");
+					int contentFieldIndex = - 1;
+					for (int i = 0; i < fieldInfos.Size(); i++)
+					{
+						FieldInfo fi = fieldInfos.FieldInfo(i);
+						if (fi.Name_ForNUnitTest.Equals("content"))
+						{
+							contentFieldIndex = i;
+							break;
+						}
+					}
+					cfsReader.Close();
+					Assert.IsTrue(contentFieldIndex != - 1, "could not locate the 'content' field number in the _0.cfs segment");
+					
+					// Now verify file names:
+					System.String[] expected;
+					expected = new System.String[]{"_0.cfs", "_0_1.del", "_0_1.s" + contentFieldIndex, "segments_4", "segments.gen"};
+					
+					if (!autoCommit)
+						expected[3] = "segments_3";
+					
+					System.String[] actual = dir.List();
+					System.Array.Sort(expected);
+					System.Array.Sort(actual);
+					if (!ArrayEquals(expected, actual))
+					{
+						Assert.Fail("incorrect filenames in index: expected:\n    " + AsString(expected) + "\n  actual:\n    " + AsString(actual));
+					}
+					dir.Close();
+				}
+				finally
+				{
+					RmDir(outputDir);
 				}
 			}
-			cfsReader.Close();
-			Assert.IsTrue(contentFieldIndex != - 1, "could not locate the 'content' field number in the _2.cfs segment");
-			
-			// Now verify file names:
-			System.String[] expected = new System.String[]{"_0.cfs", "_0_1.del", "_1.cfs", "_2.cfs", "_2_1.s" + contentFieldIndex, "_3.cfs", "segments_a", "segments.gen"};
-			
-			System.String[] actual = dir.List();
-			System.Array.Sort(expected);
-			System.Array.Sort(actual);
-			if (!ArrayEquals(expected, actual))
-			{
-				Assert.Fail("incorrect filenames in index: expected:\n    " + AsString(expected) + "\n  actual:\n    " + AsString(actual));
-			}
-			dir.Close();
-			
-			RmDir(outputDir);
 		}
 		
 		private System.String AsString(System.String[] l)
@@ -379,7 +421,7 @@
 		
 		private void  RmDir(System.String dir)
 		{
-			System.IO.FileInfo fileDir = new System.IO.FileInfo(dir);
+			System.IO.FileInfo fileDir = new System.IO.FileInfo(FullDir(dir));
 			bool tmpBool;
 			if (System.IO.File.Exists(fileDir.FullName))
 				tmpBool = true;
@@ -387,7 +429,7 @@
 				tmpBool = System.IO.Directory.Exists(fileDir.FullName);
 			if (tmpBool)
 			{
-                System.String[] files = System.IO.Directory.GetFileSystemEntries(fileDir.FullName);
+				System.String[] files = System.IO.Directory.GetFileSystemEntries(fileDir.FullName);
 				if (files != null)
 				{
 					for (int i = 0; i < files.Length; i++)
@@ -424,29 +466,34 @@
 				bool generatedAux2 = tmpBool3;
 			}
 		}
+		
+		public static System.String FullDir(System.String dirName)
+		{
+			return new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), dirName)).FullName;
+		}
 
-        public static bool ArrayEquals(System.Array array1, System.Array array2)
-        {
-            bool result = false;
-            if ((array1 == null) && (array2 == null))
-                result = true;
-            else if ((array1 != null) && (array2 != null))
-            {
-                if (array1.Length == array2.Length)
-                {
-                    int length = array1.Length;
-                    result = true;
-                    for (int index = 0; index < length; index++)
-                    {
-                        if (!(array1.GetValue(index).Equals(array2.GetValue(index))))
-                        {
-                            result = false;
-                            break;
-                        }
-                    }
-                }
-            }
-            return result;
-        }
-    }
+		public static bool ArrayEquals(System.Array array1, System.Array array2)
+		{
+			bool result = false;
+			if ((array1 == null) && (array2 == null))
+				result = true;
+			else if ((array1 != null) && (array2 != null))
+			{
+				if (array1.Length == array2.Length)
+				{
+					int length = array1.Length;
+					result = true;
+					for (int index = 0; index < length; index++)
+					{
+						if (!(array1.GetValue(index).Equals(array2.GetValue(index))))
+						{
+							result = false;
+							break;
+						}
+					}
+				}
+			}
+			return result;
+		}
+	}
 }
\ No newline at end of file

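The substantive change in this file: every scenario now runs twice, once per IndexWriter autoCommit mode, the old-format index list gained presharedstores variants, and all index paths are resolved under the configured tempDir via the new FullDir helper. A minimal sketch of that pattern, using only APIs exercised above (the test and index names are hypothetical, and an unzipped index is assumed to already exist at the resolved path):

    [Test]
    public virtual void  TestBothCommitModes()  // hypothetical name
    {
        foreach (bool autoCommit in new bool[]{true, false})
        {
            // FullDir maps a bare name onto the SupportClass.AppSettings "tempDir" root
            System.String dirName = FullDir("prelockless.cfs");
            Directory dir = FSDirectory.GetDirectory(dirName);
            IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
            writer.Optimize();
            writer.Close();
            dir.Close();
        }
    }
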
Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestCheckIndex.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestCheckIndex.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestCheckIndex.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestCheckIndex.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+
+namespace Lucene.Net.Index
+{
+	
+	[TestFixture]
+	public class TestCheckIndex : LuceneTestCase
+	{
+		
+		[Test]
+		public virtual void  TestDeletedDocs()
+		{
+			MockRAMDirectory dir = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			writer.SetMaxBufferedDocs(2);
+			Document doc = new Document();
+			doc.Add(new Field("field", "aaa", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			for (int i = 0; i < 19; i++)
+			{
+				writer.AddDocument(doc);
+			}
+			writer.Close();
+			IndexReader reader = IndexReader.Open(dir);
+			reader.DeleteDocument(5);
+			reader.Close();
+			
+			CheckIndex.out_Renamed = new System.IO.StringWriter();
+			bool condition = CheckIndex.Check(dir, false);
+			String message = CheckIndex.out_Renamed.ToString();
+			Assert.IsTrue(condition, message);
+		}
+	}
+}
\ No newline at end of file

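For reference, the pattern this new test establishes: CheckIndex.Check returns a bool verdict and writes its diagnostics to whatever writer is assigned to CheckIndex.out_Renamed, so a failing check can surface the full report. A sketch of reusing that against any Directory already holding an index (dir is assumed):

    CheckIndex.out_Renamed = new System.IO.StringWriter();
    bool ok = CheckIndex.Check(dir, false);  // false = report problems only, per the test above
    if (!ok)
    {
        Assert.Fail("index failed CheckIndex:\n" + CheckIndex.out_Renamed.ToString());
    }
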
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestCompoundFile.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs Tue Jul 15 14:44:04 2008
@@ -19,22 +19,24 @@
 
 using NUnit.Framework;
 
+//using TestRunner = junit.textui.TestRunner;
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
 using IndexInput = Lucene.Net.Store.IndexInput;
 using IndexOutput = Lucene.Net.Store.IndexOutput;
 using _TestHelper = Lucene.Net.Store._TestHelper;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
 	
 	
-	/// <author>  dmitrys@earthlink.net
-	/// </author>
-	/// <version>  $Id: TestCompoundFile.java 208807 2005-07-01 22:13:53Z dnaber $
+	/// <summary> </summary>
+	/// <version>  $Id: TestCompoundFile.java 602165 2007-12-07 17:42:33Z mikemccand $
 	/// </version>
 	[TestFixture]
-    public class TestCompoundFile
+	public class TestCompoundFile : LuceneTestCase
 	{
 		/// <summary>Main for running test case by itself. </summary>
 		[STAThread]
@@ -58,12 +60,12 @@
 		private Directory dir;
 		
 		[SetUp]
-		public virtual void  SetUp()
+		public override void SetUp()
 		{
-            System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testIndex"));
-            Lucene.Net.Util._TestUtil.RmDir(file);
-            dir = FSDirectory.GetDirectory(file);
-        }
+			System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testIndex"));
+			Lucene.Net.Util._TestUtil.RmDir(file);
+			dir = FSDirectory.GetDirectory(file);
+		}
 		
 		
 		/// <summary>Creates a file of the specified size with random data. </summary>
@@ -177,7 +179,7 @@
 		/// Files of different sizes are tested: 0, 1, 10, 100 bytes.
 		/// </summary>
 		[Test]
-        public virtual void  TestSingleFile()
+		public virtual void  TestSingleFile()
 		{
 			int[] data = new int[]{0, 1, 10, 100};
 			for (int i = 0; i < data.Length; i++)
@@ -204,7 +206,7 @@
 		/// 
 		/// </summary>
 		[Test]
-        public virtual void  TestTwoFiles()
+		public virtual void  TestTwoFiles()
 		{
 			CreateSequenceFile(dir, "d1", (byte) 0, 15);
 			CreateSequenceFile(dir, "d2", (byte) 0, 114);
@@ -238,7 +240,7 @@
 		/// the length of the buffer used internally by the compound file logic.
 		/// </summary>
 		[Test]
-        public virtual void  TestRandomFiles()
+		public virtual void  TestRandomFiles()
 		{
 			// Setup the test segment
 			System.String segment = "test";
@@ -337,7 +339,7 @@
 				b = in_Renamed.ReadByte();
 				Assert.Fail("expected readByte() to throw exception");
 			}
-			catch (System.Exception e)
+			catch (System.Exception)
 			{
 				// expected exception
 			}
@@ -426,7 +428,7 @@
 		/// their file positions are independent of each other.
 		/// </summary>
 		[Test]
-        public virtual void  TestRandomAccess()
+		public virtual void  TestRandomAccess()
 		{
 			SetUp_2();
 			CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
@@ -507,7 +509,7 @@
 		/// their file positions are independent of each other.
 		/// </summary>
 		[Test]
-        public virtual void  TestRandomAccessClones()
+		public virtual void  TestRandomAccessClones()
 		{
 			SetUp_2();
 			CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
@@ -596,7 +598,7 @@
 				IndexInput e1 = cr.OpenInput("bogus");
 				Assert.Fail("File not found");
 			}
-			catch (System.IO.IOException e)
+			catch (System.IO.IOException)
 			{
 				/* success */
 				//System.out.println("SUCCESS: File Not Found: " + e);
@@ -620,7 +622,7 @@
 				byte test = is_Renamed.ReadByte();
 				Assert.Fail("Single byte read past end of file");
 			}
-			catch (System.IO.IOException e)
+			catch (System.IO.IOException)
 			{
 				/* success */
 				//System.out.println("SUCCESS: single byte read past end of file: " + e);
@@ -632,7 +634,7 @@
 				is_Renamed.ReadBytes(b, 0, 50);
 				Assert.Fail("Block read past end of file");
 			}
-			catch (System.IO.IOException e)
+			catch (System.IO.IOException)
 			{
 				/* success */
 				//System.out.println("SUCCESS: block read past end of file: " + e);
@@ -642,31 +644,31 @@
 			cr.Close();
 		}
 
-        /// <summary>This test that writes larger than the size of the buffer output
-        /// will correctly increment the file pointer.
-        /// </summary>
-        [Test]
-        public virtual void  TestLargeWrites()
-                                               {
-            IndexOutput os = dir.CreateOutput("testBufferStart.txt");
+		/// <summary>This test verifies that writes larger than the size of the
+		/// output buffer correctly increment the file pointer.
+		/// </summary>
+		[Test]
+		public virtual void  TestLargeWrites()
+		{
+			IndexOutput os = dir.CreateOutput("testBufferStart.txt");
 
-            byte[] largeBuf = new byte[2048];
-            for (int i=0; i<largeBuf.Length; i++)
-            {
-                largeBuf[i] = (byte) ((new System.Random().NextDouble()) * 256);
-            }
+			byte[] largeBuf = new byte[2048];
+			for (int i=0; i<largeBuf.Length; i++)
+			{
+				largeBuf[i] = (byte) ((new System.Random().NextDouble()) * 256);
+			}
 
-            long currentPos = os.GetFilePointer();
-            os.WriteBytes(largeBuf, largeBuf.Length);
+			long currentPos = os.GetFilePointer();
+			os.WriteBytes(largeBuf, largeBuf.Length);
 
-            try
-            {
-                Assert.AreEqual(currentPos + largeBuf.Length, os.GetFilePointer());
-            }
-            finally
-            {
-                os.Close();
-            }
-        }
- 	}
+			try
+			{
+				Assert.AreEqual(currentPos + largeBuf.Length, os.GetFilePointer());
+			}
+			finally
+			{
+				os.Close();
+			}
+		}
+	}
 }
\ No newline at end of file

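Most of the churn above is whitespace normalization (spaces to tabs); the one substantive cleanup is that catch clauses asserting an expected failure now drop their unused exception variable, silencing compiler warnings without changing behavior. The idiom, sketched with a hypothetical input stream:

    try
    {
        input.ReadByte();  // input: an IndexInput assumed to be positioned at end of file
        Assert.Fail("expected readByte() past end of file to throw");
    }
    catch (System.IO.IOException)
    {
        // expected; the exception itself is the success signal, so no variable is bound
    }
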
Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestConcurrentMergeScheduler.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestConcurrentMergeScheduler.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestConcurrentMergeScheduler.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestConcurrentMergeScheduler.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,261 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+
+namespace Lucene.Net.Index
+{
+	
+	[TestFixture]
+	public class TestConcurrentMergeScheduler : LuceneTestCase
+	{
+		
+		private static readonly Analyzer ANALYZER = new SimpleAnalyzer();
+		
+		private class FailOnlyOnFlush : MockRAMDirectory.Failure
+		{
+			new internal bool doFail = false;
+			
+			public override void  SetDoFail()
+			{
+				this.doFail = true;
+			}
+			public override void  ClearDoFail()
+			{
+				this.doFail = false;
+			}
+
+			public override void Eval(MockRAMDirectory dir)
+			{
+				if (doFail)
+				{
+					// {{DOUG-2.3.1}} This code is suspect.  I have preserved the original (below) for
+					// comparative purposes.
+					if (new System.Exception().StackTrace.Contains("doFlush"))
+						throw new System.IO.IOException("now failing during flush");
+					//StackTraceElement[] trace = new System.Exception().getStackTrace();
+					//for (int i = 0; i < trace.Length; i++)
+					//{
+					//    if ("doFlush".Equals(trace[i].getMethodName()))
+					//    {
+					//        //new RuntimeException().printStackTrace(System.out);
+					//        throw new System.IO.IOException("now failing during flush");
+					//    }
+					//}
+				}
+			}
+		}
+		
+		// Make sure running BG merges still work fine even when
+		// we are hitting exceptions during flushing.
+		[Test]
+		public virtual void  TestFlushExceptions()
+		{
+			
+			MockRAMDirectory directory = new MockRAMDirectory();
+			FailOnlyOnFlush failure = new FailOnlyOnFlush();
+			directory.FailOn(failure);
+			
+			IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
+			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+			writer.SetMergeScheduler(cms);
+			writer.SetMaxBufferedDocs(2);
+			Document doc = new Document();
+			Field idField = new Field("id", "", Field.Store.YES, Field.Index.UN_TOKENIZED);
+			doc.Add(idField);
+			for (int i = 0; i < 10; i++)
+			{
+				for (int j = 0; j < 20; j++)
+				{
+					idField.SetValue(System.Convert.ToString(i * 20 + j));
+					writer.AddDocument(doc);
+				}
+				
+				writer.AddDocument(doc);
+				
+				failure.SetDoFail();
+				try
+				{
+					writer.Flush();
+					Assert.Fail("failed to hit IOException");
+				}
+				catch (System.IO.IOException)
+				{
+					failure.ClearDoFail();
+				}
+			}
+			
+			writer.Close();
+			IndexReader reader = IndexReader.Open(directory);
+			Assert.AreEqual(200, reader.NumDocs());
+			reader.Close();
+			directory.Close();
+		}
+		
+		// Test that deletes committed after a merge started and
+		// before it finishes, are correctly merged back:
+		[Test]
+		public virtual void  TestDeleteMerging()
+		{
+			
+			RAMDirectory directory = new MockRAMDirectory();
+			
+			IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
+			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+			writer.SetMergeScheduler(cms);
+			
+			LogDocMergePolicy mp = new LogDocMergePolicy();
+			writer.SetMergePolicy(mp);
+			
+			// Force degenerate merging so we can get a mix of
+			// merging of segments with and without deletes at the
+			// start:
+			mp.SetMinMergeDocs(1000);
+			
+			Document doc = new Document();
+			Field idField = new Field("id", "", Field.Store.YES, Field.Index.UN_TOKENIZED);
+			doc.Add(idField);
+			for (int i = 0; i < 10; i++)
+			{
+				for (int j = 0; j < 100; j++)
+				{
+					idField.SetValue(System.Convert.ToString(i * 100 + j));
+					writer.AddDocument(doc);
+				}
+				
+				int delID = i;
+				while (delID < 100 * (1 + i))
+				{
+					writer.DeleteDocuments(new Term("id", "" + delID));
+					delID += 10;
+				}
+				
+				writer.Flush();
+			}
+			
+			writer.Close();
+			IndexReader reader = IndexReader.Open(directory);
+			// Verify that we did not lose any deletes...
+			Assert.AreEqual(450, reader.NumDocs());
+			reader.Close();
+			directory.Close();
+		}
+		
+		[Test]
+		public virtual void  TestNoExtraFiles()
+		{
+			
+			RAMDirectory directory = new MockRAMDirectory();
+			
+			for (int pass = 0; pass < 2; pass++)
+			{
+				
+				bool autoCommit = pass == 0;
+				IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
+				
+				for (int iter = 0; iter < 7; iter++)
+				{
+					ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+					writer.SetMergeScheduler(cms);
+					writer.SetMaxBufferedDocs(2);
+					
+					for (int j = 0; j < 21; j++)
+					{
+						Document doc = new Document();
+						doc.Add(new Field("content", "a b c", Field.Store.NO, Field.Index.TOKENIZED));
+						writer.AddDocument(doc);
+					}
+					
+					writer.Close();
+					TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles autoCommit=" + autoCommit);
+					
+					// Reopen
+					writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
+				}
+				
+				writer.Close();
+			}
+			
+			directory.Close();
+		}
+		
+		[Test]
+		public virtual void  TestNoWaitClose()
+		{
+			RAMDirectory directory = new MockRAMDirectory();
+			
+			Document doc = new Document();
+			Field idField = new Field("id", "", Field.Store.YES, Field.Index.UN_TOKENIZED);
+			doc.Add(idField);
+			
+			for (int pass = 0; pass < 2; pass++)
+			{
+				bool autoCommit = pass == 0;
+				IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
+				
+				for (int iter = 0; iter < 10; iter++)
+				{
+					ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+					writer.SetMergeScheduler(cms);
+					writer.SetMaxBufferedDocs(2);
+					writer.SetMergeFactor(100);
+					
+					for (int j = 0; j < 201; j++)
+					{
+						idField.SetValue(System.Convert.ToString(iter * 201 + j));
+						writer.AddDocument(doc);
+					}
+					
+					int delID = iter * 201;
+					for (int j = 0; j < 20; j++)
+					{
+						writer.DeleteDocuments(new Term("id", System.Convert.ToString(delID)));
+						delID += 5;
+					}
+					
+					// Force a bunch of merge threads to kick off so we
+					// stress out aborting them on close:
+					writer.SetMergeFactor(3);
+					writer.AddDocument(doc);
+					writer.Flush();
+					
+					writer.Close(false);
+					
+					IndexReader reader = IndexReader.Open(directory);
+					Assert.AreEqual((1 + iter) * 182, reader.NumDocs());
+					reader.Close();
+					
+					// Reopen
+					writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
+				}
+				writer.Close();
+			}
+			
+			directory.Close();
+		}
+	}
+}
\ No newline at end of file

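The {{DOUG-2.3.1}} note above flags the ported failure-injection check: the Java original walked getStackTrace() looking for a frame whose method is literally named doFlush, while the port substring-matches Exception.StackTrace, which may never match the PascalCased method names in the C# port. A closer analogue is sketched below, offered only for comparison; the DoFlush method name is an assumption:

    // Walk managed stack frames, mirroring the Java getStackTrace() loop:
    System.Diagnostics.StackFrame[] frames = new System.Diagnostics.StackTrace().GetFrames();
    for (int i = 0; frames != null && i < frames.Length; i++)
    {
        if ("DoFlush".Equals(frames[i].GetMethod().Name))  // assumes the port renamed doFlush to DoFlush
        {
            throw new System.IO.IOException("now failing during flush");
        }
    }
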
Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestDeletionPolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDeletionPolicy.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDeletionPolicy.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDeletionPolicy.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,790 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Hits = Lucene.Net.Search.Hits;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using Query = Lucene.Net.Search.Query;
+using TermQuery = Lucene.Net.Search.TermQuery;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Index
+{
+	
+	/*
+	Verify that IndexDeletionPolicy implementations are invoked on init and
+	on commit, and that they can delete commit points.*/
+	
+	[TestFixture]
+	public class TestDeletionPolicy : LuceneTestCase
+	{
+		private void  VerifyCommitOrder(System.Collections.IList commits)
+		{
+			long last = SegmentInfos.GenerationFromSegmentsFileName(((IndexCommitPoint) commits[0]).GetSegmentsFileName());
+			for (int i = 1; i < commits.Count; i++)
+			{
+				long now = SegmentInfos.GenerationFromSegmentsFileName(((IndexCommitPoint) commits[i]).GetSegmentsFileName());
+				Assert.IsTrue(now > last, "SegmentInfos commits are out-of-order");
+				last = now;
+			}
+		}
+		
+		internal class KeepAllDeletionPolicy : IndexDeletionPolicy
+		{
+			public KeepAllDeletionPolicy(TestDeletionPolicy enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestDeletionPolicy enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDeletionPolicy enclosingInstance;
+			public TestDeletionPolicy Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal int numOnInit;
+			internal int numOnCommit;
+			public virtual void  OnInit(System.Collections.IList commits)
+			{
+				Enclosing_Instance.VerifyCommitOrder(commits);
+				numOnInit++;
+			}
+			public virtual void  OnCommit(System.Collections.IList commits)
+			{
+				Enclosing_Instance.VerifyCommitOrder(commits);
+				numOnCommit++;
+			}
+		}
+		
+		/// <summary> This is useful for adding to a big index w/ autoCommit
+		/// false when you know readers are not using it.
+		/// </summary>
+		internal class KeepNoneOnInitDeletionPolicy : IndexDeletionPolicy
+		{
+			public KeepNoneOnInitDeletionPolicy(TestDeletionPolicy enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestDeletionPolicy enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDeletionPolicy enclosingInstance;
+			public TestDeletionPolicy Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal int numOnInit;
+			internal int numOnCommit;
+			public virtual void  OnInit(System.Collections.IList commits)
+			{
+				Enclosing_Instance.VerifyCommitOrder(commits);
+				numOnInit++;
+				// On init, delete all commit points:
+				System.Collections.IEnumerator it = commits.GetEnumerator();
+				while (it.MoveNext())
+				{
+					((IndexCommitPoint) it.Current).Delete();
+				}
+			}
+			public virtual void  OnCommit(System.Collections.IList commits)
+			{
+				Enclosing_Instance.VerifyCommitOrder(commits);
+				int size = commits.Count;
+				// Delete all but last one:
+				for (int i = 0; i < size - 1; i++)
+				{
+					((IndexCommitPoint) commits[i]).Delete();
+				}
+				numOnCommit++;
+			}
+		}
+		
+		internal class KeepLastNDeletionPolicy : IndexDeletionPolicy
+		{
+			private void  InitBlock(TestDeletionPolicy enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDeletionPolicy enclosingInstance;
+			public TestDeletionPolicy Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal int numOnInit;
+			internal int numOnCommit;
+			internal int numToKeep;
+			internal int numDelete;
+			internal System.Collections.Hashtable seen = new System.Collections.Hashtable();
+			
+			public KeepLastNDeletionPolicy(TestDeletionPolicy enclosingInstance, int numToKeep)
+			{
+				InitBlock(enclosingInstance);
+				this.numToKeep = numToKeep;
+			}
+			
+			public virtual void  OnInit(System.Collections.IList commits)
+			{
+				Enclosing_Instance.VerifyCommitOrder(commits);
+				numOnInit++;
+				// do no deletions on init
+				DoDeletes(commits, false);
+			}
+			
+			public virtual void  OnCommit(System.Collections.IList commits)
+			{
+				Enclosing_Instance.VerifyCommitOrder(commits);
+				DoDeletes(commits, true);
+			}
+			
+			private void  DoDeletes(System.Collections.IList commits, bool isCommit)
+			{
+				
+				// Assert that we really are only called for each new
+				// commit:
+				if (isCommit)
+				{
+					System.String fileName = ((IndexCommitPoint) commits[commits.Count - 1]).GetSegmentsFileName();
+					if (seen.Contains(fileName))
+					{
+						throw new System.SystemException("onCommit was called twice on the same commit point: " + fileName);
+					}
+					seen.Add(fileName, fileName);
+					numOnCommit++;
+				}
+				int size = commits.Count;
+				for (int i = 0; i < size - numToKeep; i++)
+				{
+					((IndexCommitPoint) commits[i]).Delete();
+					numDelete++;
+				}
+			}
+		}
+		
+		/*
+		* Delete a commit only when it has been obsoleted by N
+		* seconds.
+		*/
+		internal class ExpirationTimeDeletionPolicy : IndexDeletionPolicy
+		{
+			private void  InitBlock(TestDeletionPolicy enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDeletionPolicy enclosingInstance;
+			public TestDeletionPolicy Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			internal Directory dir;
+			internal double expirationTimeSeconds;
+			internal int numDelete;
+			
+			public ExpirationTimeDeletionPolicy(TestDeletionPolicy enclosingInstance, Directory dir, double seconds)
+			{
+				InitBlock(enclosingInstance);
+				this.dir = dir;
+				this.expirationTimeSeconds = seconds;
+			}
+			
+			public virtual void  OnInit(System.Collections.IList commits)
+			{
+				Enclosing_Instance.VerifyCommitOrder(commits);
+				OnCommit(commits);
+			}
+			
+			public virtual void  OnCommit(System.Collections.IList commits)
+			{
+				Enclosing_Instance.VerifyCommitOrder(commits);
+				
+				IndexCommitPoint lastCommit = (IndexCommitPoint) commits[commits.Count - 1];
+				
+				// Any commit older than expireTime should be deleted:
+				double expireTime = dir.FileModified(lastCommit.GetSegmentsFileName()) / 1000.0 - expirationTimeSeconds;
+				
+				System.Collections.IEnumerator it = commits.GetEnumerator();
+				
+				while (it.MoveNext())
+				{
+					IndexCommitPoint commit = (IndexCommitPoint) it.Current;
+					double modTime = dir.FileModified(commit.GetSegmentsFileName()) / 1000.0;
+					if (commit != lastCommit && modTime < expireTime)
+					{
+						commit.Delete();
+						numDelete += 1;
+					}
+				}
+			}
+		}
+		
+		/*
+		* Test "by time expiration" deletion policy:
+		*/
+		[Test]
+		public virtual void  TestExpirationTimeDeletionPolicy()
+		{
+			
+			double SECONDS = 2.0;
+			
+			bool autoCommit = false;
+			bool useCompoundFile = true;
+			
+			Directory dir = new RAMDirectory();
+			ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(this, dir, SECONDS);
+			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+			writer.SetUseCompoundFile(useCompoundFile);
+			writer.Close();
+			
+			long lastDeleteTime = 0;
+			for (int i = 0; i < 7; i++)
+			{
+				// Record last time when writer performed deletes of
+				// past commits
+				lastDeleteTime = (System.DateTime.Now.Ticks - 621355968000000000) / 10000;
+				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+				writer.SetUseCompoundFile(useCompoundFile);
+				for (int j = 0; j < 17; j++)
+				{
+					AddDoc(writer);
+				}
+				writer.Close();
+				
+				// Make sure to sleep long enough so that some commit
+				// points will be deleted:
+				System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * (int) (1000.0 * (SECONDS / 5.0))));
+			}
+			
+			// First, make sure the policy in fact deleted something:
+			Assert.IsTrue(policy.numDelete > 0, "no commits were deleted");
+			
+			// Then simplistic check: just verify that the
+			// segments_N's that still exist are in fact within SECONDS
+			// seconds of the last one's mod time, and, that I can
+			// open a reader on each:
+			long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
+			
+			System.String fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);
+			dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+			while (gen > 0)
+			{
+				try
+				{
+					IndexReader reader = IndexReader.Open(dir);
+					reader.Close();
+					fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);
+					long modTime = dir.FileModified(fileName);
+					Assert.IsTrue(lastDeleteTime - modTime <= (SECONDS * 1000), "commit point was older than " + SECONDS + " seconds (" + (lastDeleteTime - modTime) + " msec) but did not get deleted");
+				}
+				catch (System.IO.IOException)
+				{
+					// OK
+					break;
+				}
+				
+				dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+				gen--;
+			}
+			
+			dir.Close();
+		}
+		
+		/*
+		* Test a silly deletion policy that keeps all commits around.
+		*/
+		[Test]
+		public virtual void  TestKeepAllDeletionPolicy()
+		{
+			
+			for (int pass = 0; pass < 4; pass++)
+			{
+				
+				bool autoCommit = pass < 2;
+				bool useCompoundFile = (pass % 2) > 0;
+				
+				KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy(this);
+				
+				Directory dir = new RAMDirectory();
+				
+				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+				writer.SetMaxBufferedDocs(10);
+				writer.SetUseCompoundFile(useCompoundFile);
+				for (int i = 0; i < 107; i++)
+				{
+					AddDoc(writer);
+				}
+				writer.Close();
+				
+				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+				writer.SetUseCompoundFile(useCompoundFile);
+				writer.Optimize();
+				writer.Close();
+				
+				Assert.AreEqual(2, policy.numOnInit);
+				if (autoCommit)
+				{
+					Assert.IsTrue(policy.numOnCommit > 2);
+				}
+				else
+				{
+					// If we are not auto committing then there should
+					// be exactly 2 commits (one per close above):
+					Assert.AreEqual(2, policy.numOnCommit);
+				}
+				
+				// Simplistic check: just verify all segments_N's still
+				// exist, and, I can open a reader on each:
+				dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+				long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
+				while (gen > 0)
+				{
+					IndexReader reader = IndexReader.Open(dir);
+					reader.Close();
+					dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+					gen--;
+					
+					if (gen > 0)
+					{
+						// Removing a commit point should have orphaned
+						// at least one index file.
+						// Open & close a writer and assert that it
+						// actually removed something:
+						int preCount = dir.List().Length;
+						writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false, policy);
+						writer.Close();
+						int postCount = dir.List().Length;
+						Assert.IsTrue(postCount < preCount);
+					}
+				}
+				
+				dir.Close();
+			}
+		}
+		
+		/* Test keeping NO commit points.  This is a viable and
+		* useful case eg where you want to build a big index with
+		* autoCommit false and you know there are no readers.
+		*/
+		[Test]
+		public virtual void  TestKeepNoneOnInitDeletionPolicy()
+		{
+			
+			for (int pass = 0; pass < 4; pass++)
+			{
+				
+				bool autoCommit = pass < 2;
+				bool useCompoundFile = (pass % 2) > 0;
+				
+				KeepNoneOnInitDeletionPolicy policy = new KeepNoneOnInitDeletionPolicy(this);
+				
+				Directory dir = new RAMDirectory();
+				
+				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+				writer.SetMaxBufferedDocs(10);
+				writer.SetUseCompoundFile(useCompoundFile);
+				for (int i = 0; i < 107; i++)
+				{
+					AddDoc(writer);
+				}
+				writer.Close();
+				
+				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+				writer.SetUseCompoundFile(useCompoundFile);
+				writer.Optimize();
+				writer.Close();
+				
+				Assert.AreEqual(2, policy.numOnInit);
+				if (autoCommit)
+				{
+					Assert.IsTrue(policy.numOnCommit > 2);
+				}
+				else
+				{
+					// If we are not auto committing then there should
+					// be exactly 2 commits (one per close above):
+					Assert.AreEqual(2, policy.numOnCommit);
+				}
+				
+				// Simplistic check: just verify the index is in fact
+				// readable:
+				IndexReader reader = IndexReader.Open(dir);
+				reader.Close();
+				
+				dir.Close();
+			}
+		}
+		
+		/*
+		* Test a deletion policy that keeps last N commits.
+		*/
+		[Test]
+		public virtual void  TestKeepLastNDeletionPolicy()
+		{
+			
+			int N = 5;
+			
+			for (int pass = 0; pass < 4; pass++)
+			{
+				
+				bool autoCommit = pass < 2;
+				bool useCompoundFile = (pass % 2) > 0;
+				
+				Directory dir = new RAMDirectory();
+				
+				KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
+				
+				for (int j = 0; j < N + 1; j++)
+				{
+					IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+					writer.SetMaxBufferedDocs(10);
+					writer.SetUseCompoundFile(useCompoundFile);
+					for (int i = 0; i < 17; i++)
+					{
+						AddDoc(writer);
+					}
+					writer.Optimize();
+					writer.Close();
+				}
+				
+				Assert.IsTrue(policy.numDelete > 0);
+				Assert.AreEqual(N + 1, policy.numOnInit);
+				if (autoCommit)
+				{
+					Assert.IsTrue(policy.numOnCommit > 1);
+				}
+				else
+				{
+					Assert.AreEqual(N + 1, policy.numOnCommit);
+				}
+				
+				// Simplistic check: just verify only the past N segments_N's still
+				// exist, and, I can open a reader on each:
+				dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+				long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
+				for (int i = 0; i < N + 1; i++)
+				{
+					try
+					{
+						IndexReader reader = IndexReader.Open(dir);
+						reader.Close();
+						if (i == N)
+						{
+							Assert.Fail("should have failed on commits prior to last " + N);
+						}
+					}
+					catch (System.IO.IOException e)
+					{
+						if (i != N)
+						{
+							throw e;
+						}
+					}
+					if (i < N)
+					{
+						dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+					}
+					gen--;
+				}
+				
+				dir.Close();
+			}
+		}
+		
+		/*
+		* Test a deletion policy that keeps last N commits
+		* around, with reader doing deletes.
+		*/
+		[Test]
+		public virtual void  TestKeepLastNDeletionPolicyWithReader()
+		{
+			
+			int N = 10;
+			
+			for (int pass = 0; pass < 4; pass++)
+			{
+				
+				bool autoCommit = pass < 2;
+				bool useCompoundFile = (pass % 2) > 0;
+				
+				KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
+				
+				Directory dir = new RAMDirectory();
+				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+				writer.SetUseCompoundFile(useCompoundFile);
+				writer.Close();
+				Term searchTerm = new Term("content", "aaa");
+				Query query = new TermQuery(searchTerm);
+				
+				for (int i = 0; i < N + 1; i++)
+				{
+					writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+					writer.SetUseCompoundFile(useCompoundFile);
+					for (int j = 0; j < 17; j++)
+					{
+						AddDoc(writer);
+					}
+					// this is a commit when autoCommit=false:
+					writer.Close();
+					IndexReader reader = IndexReader.Open(dir, policy);
+					reader.DeleteDocument(3 * i + 1);
+					reader.SetNorm(4 * i + 1, "content", 2.0F);
+					IndexSearcher searcher = new IndexSearcher(reader);
+					Hits hits = searcher.Search(query);
+					Assert.AreEqual(16 * (1 + i), hits.Length());
+					// this is a commit when autoCommit=false:
+					reader.Close();
+					searcher.Close();
+				}
+				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+				writer.SetUseCompoundFile(useCompoundFile);
+				writer.Optimize();
+				// this is a commit when autoCommit=false:
+				writer.Close();
+				
+				Assert.AreEqual(2 * (N + 2), policy.numOnInit);
+				if (autoCommit)
+				{
+					Assert.IsTrue(policy.numOnCommit > 2 * (N + 2) - 1);
+				}
+				else
+				{
+					Assert.AreEqual(2 * (N + 2) - 1, policy.numOnCommit);
+				}
+				
+				IndexSearcher searcher2 = new IndexSearcher(dir);
+				Hits hits2 = searcher2.Search(query);
+				Assert.AreEqual(176, hits2.Length());
+				
+				// Simplistic check: just verify only the past N segments_N's still
+				// exist, and, I can open a reader on each:
+				long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
+				
+				dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+				int expectedCount = 176;
+				
+				for (int i = 0; i < N + 1; i++)
+				{
+					try
+					{
+						IndexReader reader = IndexReader.Open(dir);
+						
+						// Work backwards in commits on what the expected
+						// count should be.  Only check this in the
+						// autoCommit false case:
+						if (!autoCommit)
+						{
+							searcher2 = new IndexSearcher(reader);
+							hits2 = searcher2.Search(query);
+							if (i > 1)
+							{
+								if (i % 2 == 0)
+								{
+									expectedCount += 1;
+								}
+								else
+								{
+									expectedCount -= 17;
+								}
+							}
+							Assert.AreEqual(expectedCount, hits2.Length());
+							searcher2.Close();
+						}
+						reader.Close();
+						if (i == N)
+						{
+							Assert.Fail("should have failed on commits before last " + N);
+						}
+					}
+					catch (System.IO.IOException e)
+					{
+						if (i != N)
+						{
+							throw e;
+						}
+					}
+					if (i < N)
+					{
+						dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+					}
+					gen--;
+				}
+				
+				dir.Close();
+			}
+		}
+		
+		/*
+		* Test a deletion policy that keeps last N commits
+		* around, through creates.
+		*/
+		[Test]
+		public virtual void  TestKeepLastNDeletionPolicyWithCreates()
+		{
+			
+			int N = 10;
+			
+			for (int pass = 0; pass < 4; pass++)
+			{
+				
+				bool autoCommit = pass < 2;
+				bool useCompoundFile = (pass % 2) > 0;
+				
+				KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
+				
+				Directory dir = new RAMDirectory();
+				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+				writer.SetMaxBufferedDocs(10);
+				writer.SetUseCompoundFile(useCompoundFile);
+				writer.Close();
+				Term searchTerm = new Term("content", "aaa");
+				Query query = new TermQuery(searchTerm);
+				
+				for (int i = 0; i < N + 1; i++)
+				{
+					
+					writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+					writer.SetMaxBufferedDocs(10);
+					writer.SetUseCompoundFile(useCompoundFile);
+					for (int j = 0; j < 17; j++)
+					{
+						AddDoc(writer);
+					}
+					// this is a commit when autoCommit=false:
+					writer.Close();
+					IndexReader reader = IndexReader.Open(dir, policy);
+					reader.DeleteDocument(3);
+					reader.SetNorm(5, "content", 2.0F);
+					IndexSearcher searcher = new IndexSearcher(reader);
+					Hits hits = searcher.Search(query);
+					Assert.AreEqual(16, hits.Length());
+					// this is a commit when autoCommit=false:
+					reader.Close();
+					searcher.Close();
+					
+					writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+					// This will not commit: there are no changes
+					// pending because we opened for "create":
+					writer.Close();
+				}
+				
+				Assert.AreEqual(1 + 3 * (N + 1), policy.numOnInit);
+				if (autoCommit)
+				{
+					Assert.IsTrue(policy.numOnCommit > 3 * (N + 1) - 1);
+				}
+				else
+				{
+					Assert.AreEqual(2 * (N + 1), policy.numOnCommit);
+				}
+				
+				IndexSearcher searcher2 = new IndexSearcher(dir);
+				Hits hits2 = searcher2.Search(query);
+				Assert.AreEqual(0, hits2.Length());
+				
+				// Simplistic check: just verify only the past N segments_N's still
+				// exist, and, I can open a reader on each:
+				long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
+				
+				dir.DeleteFile(IndexFileNames.SEGMENTS_GEN);
+				int expectedCount = 0;
+				
+				for (int i = 0; i < N + 1; i++)
+				{
+					try
+					{
+						IndexReader reader = IndexReader.Open(dir);
+						
+						// Work backwards through the commits to compute the
+						// expected hit count: each forward cycle committed a
+						// create (0 hits), 17 adds (17 hits), and one delete
+						// (16 hits), so the counts cycle 0 -> 16 -> 17.  Only
+						// check this in the autoCommit=false case:
+						if (!autoCommit)
+						{
+							searcher2 = new IndexSearcher(reader);
+							hits2 = searcher2.Search(query);
+							Assert.AreEqual(expectedCount, hits2.Length());
+							searcher2.Close();
+							if (expectedCount == 0)
+							{
+								expectedCount = 16;
+							}
+							else if (expectedCount == 16)
+							{
+								expectedCount = 17;
+							}
+							else if (expectedCount == 17)
+							{
+								expectedCount = 0;
+							}
+						}
+						reader.Close();
+						if (i == N)
+						{
+							Assert.Fail("should have failed on commits before last " + N);
+						}
+					}
+					catch (System.IO.IOException)
+					{
+						// Opening may only fail once we step past the N kept commits:
+						if (i != N)
+						{
+							throw;
+						}
+					}
+					if (i < N)
+					{
+						dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
+					}
+					gen--;
+				}
+				
+				dir.Close();
+			}
+		}
+		
+		private void  AddDoc(IndexWriter writer)
+		{
+			Document doc = new Document();
+			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
+			writer.AddDocument(doc);
+		}
+	}
+}
\ No newline at end of file
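
Note: the KeepLastNDeletionPolicy exercised above plugs into the
IndexDeletionPolicy hook, which hands the policy every commit point
(oldest first) on init and on each commit.  A minimal sketch of such a
policy, assuming the Lucene.Net 2.x IndexDeletionPolicy/IndexCommitPoint
API (the class name and the omission of the test's numOnInit/numOnCommit
counters are illustrative, not part of this commit):

    using System.Collections;
    using Lucene.Net.Index;

    // Keep only the newest n commit points alive.
    public class KeepLastNPolicySketch : IndexDeletionPolicy
    {
        private readonly int n;

        public KeepLastNPolicySketch(int n)
        {
            this.n = n;
        }

        public void OnInit(IList commits)
        {
            // Startup is treated like any other commit.
            OnCommit(commits);
        }

        public void OnCommit(IList commits)
        {
            // Commits arrive oldest first; delete all but the last n.
            int numToDelete = commits.Count - n;
            for (int i = 0; i < numToDelete; i++)
            {
                ((IndexCommitPoint) commits[i]).Delete();
            }
        }
    }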

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDoc.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs Tue Jul 15 14:44:04 2008
@@ -19,25 +19,27 @@
 
 using NUnit.Framework;
 
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using FSDirectory = Lucene.Net.Store.FSDirectory;
-using Directory = Lucene.Net.Store.Directory;
+//using TestRunner = junit.textui.TestRunner;
 using Document = Lucene.Net.Documents.Document;
-using Similarity = Lucene.Net.Search.Similarity;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using FileDocument = Lucene.Net.Demo.FileDocument;
+using Similarity = Lucene.Net.Search.Similarity;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
 	
 	
-	/// <summary>JUnit adaptation of an older test case DocTest.</summary>
-	/// <author>  dmitrys@earthlink.net
-	/// </author>
-	/// <version>  $Id: TestDoc.java 150536 2004-09-28 18:15:52Z cutting $
+	/// <summary>JUnit adaptation of an older test case DocTest.
+	/// 
+	/// </summary>
+	/// <version>  $Id: TestDoc.java 583534 2007-10-10 16:46:35Z mikemccand $
 	/// </version>
 	[TestFixture]
-    public class TestDoc
+	public class TestDoc
 	{
 		
 		/// <summary>Main for running test case by itself. </summary>
@@ -57,7 +59,7 @@
 		/// a few text files created in the current working directory.
 		/// </summary>
 		[SetUp]
-        public virtual void  SetUp()
+		public virtual void  SetUp()
 		{
 			workDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", "tempDir"), "TestDoc"));
 			System.IO.Directory.CreateDirectory(workDir.FullName);
@@ -131,76 +133,74 @@
 		/// assert various things about the segment.
 		/// </summary>
 		[Test]
-        public virtual void  TestIndexAndMerge()
+		public virtual void  TestIndexAndMerge()
 		{
-            System.IO.MemoryStream sw = new System.IO.MemoryStream();
-            System.IO.StreamWriter out_Renamed = new System.IO.StreamWriter(sw);
+			System.IO.MemoryStream sw = new System.IO.MemoryStream();
+			System.IO.StreamWriter out_Renamed = new System.IO.StreamWriter(sw);
 			
-			Directory directory = FSDirectory.GetDirectory(indexDir, true);
-			directory.Close();
+			Directory directory = FSDirectory.GetDirectory(indexDir);
+			IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
 			
-            SegmentInfo si1 = IndexDoc("one", "test.txt");
-            PrintSegment(out_Renamed, si1);
+			SegmentInfo si1 = IndexDoc(writer, "test.txt");
+			PrintSegment(out_Renamed, si1);
 			
-            SegmentInfo si2 = IndexDoc("two", "test2.txt");
-            PrintSegment(out_Renamed, si2);
+			SegmentInfo si2 = IndexDoc(writer, "test2.txt");
+			PrintSegment(out_Renamed, si2);
+			writer.Close();
+			directory.Close();
 			
-            SegmentInfo siMerge = Merge(si1, si2, "merge", false);
-            PrintSegment(out_Renamed, siMerge);
+			SegmentInfo siMerge = Merge(si1, si2, "merge", false);
+			PrintSegment(out_Renamed, siMerge);
 			
-            SegmentInfo siMerge2 = Merge(si1, si2, "merge2", false);
-            PrintSegment(out_Renamed, siMerge2);
+			SegmentInfo siMerge2 = Merge(si1, si2, "merge2", false);
+			PrintSegment(out_Renamed, siMerge2);
 			
-            SegmentInfo siMerge3 = Merge(siMerge, siMerge2, "merge3", false);
-            PrintSegment(out_Renamed, siMerge3);
+			SegmentInfo siMerge3 = Merge(siMerge, siMerge2, "merge3", false);
+			PrintSegment(out_Renamed, siMerge3);
 			
 			out_Renamed.Close();
 			sw.Close();
-            System.String multiFileOutput = System.Text.ASCIIEncoding.ASCII.GetString(sw.ToArray());
-            //System.out.println(multiFileOutput);
+			System.String multiFileOutput = System.Text.ASCIIEncoding.ASCII.GetString(sw.ToArray());
+			//System.out.println(multiFileOutput);
 			
-            sw = new System.IO.MemoryStream();
-            out_Renamed = new System.IO.StreamWriter(sw);
+			sw = new System.IO.MemoryStream();
+			out_Renamed = new System.IO.StreamWriter(sw);
 			
-			directory = FSDirectory.GetDirectory(indexDir, true);
-			directory.Close();
+			directory = FSDirectory.GetDirectory(indexDir);
+			writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
 			
-            si1 = IndexDoc("one", "test.txt");
-            PrintSegment(out_Renamed, si1);
+			si1 = IndexDoc(writer, "test.txt");
+			PrintSegment(out_Renamed, si1);
 			
-            si2 = IndexDoc("two", "test2.txt");
-            PrintSegment(out_Renamed, si2);
+			si2 = IndexDoc(writer, "test2.txt");
+			PrintSegment(out_Renamed, si2);
+			writer.Close();
+			directory.Close();
 			
-            siMerge = Merge(si1, si2, "merge", true);
-            PrintSegment(out_Renamed, siMerge);
+			siMerge = Merge(si1, si2, "merge", true);
+			PrintSegment(out_Renamed, siMerge);
 			
-            siMerge2 = Merge(si1, si2, "merge2", true);
-            PrintSegment(out_Renamed, siMerge2);
+			siMerge2 = Merge(si1, si2, "merge2", true);
+			PrintSegment(out_Renamed, siMerge2);
 			
-            siMerge3 = Merge(siMerge, siMerge2, "merge3", true);
-            PrintSegment(out_Renamed, siMerge3);
+			siMerge3 = Merge(siMerge, siMerge2, "merge3", true);
+			PrintSegment(out_Renamed, siMerge3);
 			
 			out_Renamed.Close();
 			sw.Close();
-            System.String singleFileOutput = System.Text.ASCIIEncoding.ASCII.GetString(sw.ToArray());
+			System.String singleFileOutput = System.Text.ASCIIEncoding.ASCII.GetString(sw.ToArray());
 			
 			Assert.AreEqual(multiFileOutput, singleFileOutput);
 		}
 		
 		
-		private SegmentInfo IndexDoc(System.String segment, System.String fileName)
+		private SegmentInfo IndexDoc(IndexWriter writer, System.String fileName)
 		{
-			Directory directory = FSDirectory.GetDirectory(indexDir, false);
-			Analyzer analyzer = new SimpleAnalyzer();
-			DocumentWriter writer = new DocumentWriter(directory, analyzer, Similarity.GetDefault(), 1000);
-			
-			System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, fileName));
-			Lucene.Net.Documents.Document doc = FileDocument.Document(file);
-			
-			writer.AddDocument(segment, doc);
-			
-			directory.Close();
-            return new SegmentInfo(segment, 1, directory, false, false);
+			System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, fileName));
+			Document doc = FileDocument.Document(file);
+			writer.AddDocument(doc);
+			writer.Flush();
+			return writer.NewestSegment();
 		}
 		
 		
@@ -228,8 +228,8 @@
 			}
 			
 			directory.Close();
-            return new SegmentInfo(merged, si1.docCount + si2.docCount, directory, useCompoundFile, true);
-        }
+			return new SegmentInfo(merged, si1.docCount + si2.docCount, directory, useCompoundFile, true);
+		}
 		
 		
 		private void  PrintSegment(System.IO.StreamWriter out_Renamed, SegmentInfo si)
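
Note: the rewrite above moves TestDoc off the old package-private
DocumentWriter and onto the public IndexWriter API: add the document,
flush it into its own segment, then inspect that segment through the
test-only NewestSegment() hook.  Condensed, the pattern used by the new
IndexDoc() looks roughly like this (it assumes the fixture's indexDir
and workDir fields and the same 2.x API shown in the diff):

    // add -> flush -> inspect the newest segment
    Directory directory = FSDirectory.GetDirectory(indexDir);
    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);

    System.IO.FileInfo file =
        new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, "test.txt"));
    Document doc = FileDocument.Document(file);

    writer.AddDocument(doc);                      // buffer the document
    writer.Flush();                               // force it into a new segment
    SegmentInfo newest = writer.NewestSegment();  // the segment just written

    writer.Close();
    directory.Close();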

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDocumentWriter.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs Tue Jul 15 14:44:04 2008
@@ -19,19 +19,20 @@
 
 using NUnit.Framework;
 
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
-using WhitespaceTokenizer = Lucene.Net.Analysis.WhitespaceTokenizer;
-using Lucene.Net.Documents;
-using Similarity = Lucene.Net.Search.Similarity;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Fieldable = Lucene.Net.Documents.Fieldable;
+using TermVector = Lucene.Net.Documents.Field.TermVector;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Lucene.Net.Analysis;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
 	[TestFixture]
-	public class TestDocumentWriter
+	public class TestDocumentWriter : LuceneTestCase
 	{
 		private class AnonymousClassAnalyzer : Analyzer
 		{
@@ -52,8 +53,7 @@
 				}
 				
 			}
-
-            public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
 				return new WhitespaceTokenizer(reader);
 			}
@@ -63,48 +63,158 @@
 				return 500;
 			}
 		}
-		private RAMDirectory dir;
-		
-        // public TestDocumentWriter(System.String s)
-        // {
-        // }
 		
-        [SetUp]
-        public virtual void  SetUp()
+		private class AnonymousClassAnalyzer1 : Analyzer
 		{
-			dir = new RAMDirectory();
+			public AnonymousClassAnalyzer1(TestDocumentWriter enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			
+			private class AnonymousClassTokenFilter:TokenFilter
+			{
+				private void  InitBlock(AnonymousClassAnalyzer1 enclosingInstance)
+				{
+					this.enclosingInstance = enclosingInstance;
+				}
+				private AnonymousClassAnalyzer1 enclosingInstance;
+				public AnonymousClassAnalyzer1 Enclosing_Instance
+				{
+					get
+					{
+						return enclosingInstance;
+					}
+					
+				}
+				internal AnonymousClassTokenFilter(AnonymousClassAnalyzer1 enclosingInstance, Lucene.Net.Analysis.TokenStream Param1):base(Param1)
+				{
+					InitBlock(enclosingInstance);
+				}
+				internal bool first = true;
+				internal Token buffered;
+				
+				public override Token Next()
+				{
+					return input.Next();
+				}
+				
+				public override Token Next(Token result)
+				{
+					if (buffered != null)
+					{
+						Token t = buffered;
+						buffered = null;
+						return t;
+					}
+					Token t2 = input.Next(result);
+					if (t2 == null)
+						return null;
+					if (System.Char.IsDigit(t2.TermBuffer()[0]))
+					{
+						t2.SetPositionIncrement(t2.TermBuffer()[0] - '0');
+					}
+					if (first)
+					{
+						// set payload on first position only
+						t2.SetPayload(new Payload(new byte[]{100}));
+						first = false;
+					}
+					
+					// index a "synonym" for every token
+					buffered = (Token) t2.Clone();
+					buffered.SetPayload(null);
+					buffered.SetPositionIncrement(0);
+					buffered.SetTermBuffer(new char[]{'b'}, 0, 1);
+					
+					return t2;
+				}
+			}
+			private void  InitBlock(TestDocumentWriter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDocumentWriter enclosingInstance;
+			public TestDocumentWriter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				return new AnonymousClassTokenFilter(this, new WhitespaceTokenizer(reader));
+			}
 		}
 		
-		[TearDown]
-        public virtual void  TearDown()
+		private class AnonymousClassTokenStream : TokenStream
 		{
+			public AnonymousClassTokenStream(TestDocumentWriter enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestDocumentWriter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestDocumentWriter enclosingInstance;
+			public TestDocumentWriter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			private System.String[] tokens = new System.String[]{"term1", "term2", "term3", "term2"};
+			private int index = 0;
 			
+			public override Token Next()
+			{
+				if (index == tokens.Length)
+				{
+					return null;
+				}
+				else
+				{
+					return new Token(tokens[index++], 0, 0);
+				}
+			}
+		}
+		private RAMDirectory dir;
+		
+		[SetUp]
+		public override void  SetUp()
+		{
+			dir = new RAMDirectory();
 		}
 		
 		[Test]
-        public virtual void  Test()
+		public virtual void  Test()
 		{
 			Assert.IsTrue(dir != null);
 		}
 		
 		[Test]
-        public virtual void  TestAddDocument()
+		public virtual void  TestAddDocument()
 		{
-			Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
+			Document testDoc = new Document();
 			DocHelper.SetupDoc(testDoc);
 			Analyzer analyzer = new WhitespaceAnalyzer();
-			Lucene.Net.Search.Similarity similarity = Lucene.Net.Search.Similarity.GetDefault();
-			DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
-			System.String segName = "test";
-			writer.AddDocument(segName, testDoc);
+			IndexWriter writer = new IndexWriter(dir, analyzer, true);
+			writer.AddDocument(testDoc);
+			writer.Flush();
+			SegmentInfo info = writer.NewestSegment();
+			writer.Close();
 			//After adding the document, we should be able to read it back in
-			SegmentReader reader = SegmentReader.Get(new SegmentInfo(segName, 1, dir));
+			SegmentReader reader = SegmentReader.Get(info);
 			Assert.IsTrue(reader != null);
-			Lucene.Net.Documents.Document doc = reader.Document(0);
+			Document doc = reader.Document(0);
 			Assert.IsTrue(doc != null);
 			
 			//System.out.println("Document: " + doc);
-			Field[] fields = doc.GetFields("textField2");
+			Fieldable[] fields = doc.GetFields("textField2");
 			Assert.IsTrue(fields != null && fields.Length == 1);
 			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_2_TEXT));
 			Assert.IsTrue(fields[0].IsTermVectorStored());
@@ -126,31 +236,34 @@
 			Assert.IsTrue(fields != null && fields.Length == 1);
 			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_3_TEXT));
 			
-			// test that the norm file is not present if omitNorms is true
-			for (int i = 0; i < reader.FieldInfos.Size(); i++)
+			// test that the norms are not present in the segment if
+			// omitNorms is true
+			for (int i = 0; i < reader.FieldInfos().Size(); i++)
 			{
-				FieldInfo fi = reader.FieldInfos.FieldInfo(i);
+				FieldInfo fi = reader.FieldInfos().FieldInfo(i);
 				if (fi.IsIndexed())
 				{
-					Assert.IsTrue(fi.omitNorms == !dir.FileExists(segName + ".f" + i));
+					Assert.IsTrue(fi.omitNorms == !reader.HasNorms(fi.Name_ForNUnitTest));
 				}
 			}
 		}
 		
 		[Test]
-        public virtual void  TestPositionIncrementGap()
+		public virtual void  TestPositionIncrementGap()
 		{
 			Analyzer analyzer = new AnonymousClassAnalyzer(this);
 			
-			Lucene.Net.Search.Similarity similarity = Lucene.Net.Search.Similarity.GetDefault();
-			DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			IndexWriter writer = new IndexWriter(dir, analyzer, true);
+			
+			Document doc = new Document();
 			doc.Add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.TOKENIZED));
 			doc.Add(new Field("repeated", "repeated two", Field.Store.YES, Field.Index.TOKENIZED));
 			
-			System.String segName = "test";
-			writer.AddDocument(segName, doc);
-			SegmentReader reader = SegmentReader.Get(new SegmentInfo(segName, 1, dir));
+			writer.AddDocument(doc);
+			writer.Flush();
+			SegmentInfo info = writer.NewestSegment();
+			writer.Close();
+			SegmentReader reader = SegmentReader.Get(info);
 			
 			TermPositions termPositions = reader.TermPositions(new Term("repeated", "repeated"));
 			Assert.IsTrue(termPositions.Next());
@@ -159,5 +272,95 @@
 			Assert.AreEqual(0, termPositions.NextPosition());
 			Assert.AreEqual(502, termPositions.NextPosition());
 		}
+		
+		[Test]
+		public virtual void  TestTokenReuse()
+		{
+			Analyzer analyzer = new AnonymousClassAnalyzer1(this);
+			
+			IndexWriter writer = new IndexWriter(dir, analyzer, true);
+			
+			Document doc = new Document();
+			doc.Add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.TOKENIZED));
+			
+			writer.AddDocument(doc);
+			writer.Flush();
+			SegmentInfo info = writer.NewestSegment();
+			writer.Close();
+			SegmentReader reader = SegmentReader.Get(info);
+			
+			TermPositions termPositions = reader.TermPositions(new Term("f1", "a"));
+			Assert.IsTrue(termPositions.Next());
+			int freq = termPositions.Freq();
+			Assert.AreEqual(3, freq);
+			Assert.AreEqual(0, termPositions.NextPosition());
+			Assert.IsTrue(termPositions.IsPayloadAvailable());
+			Assert.AreEqual(6, termPositions.NextPosition());
+			Assert.IsFalse(termPositions.IsPayloadAvailable());
+			Assert.AreEqual(7, termPositions.NextPosition());
+			Assert.IsFalse(termPositions.IsPayloadAvailable());
+		}
+		
+		
+		[Test]
+		public virtual void  TestPreAnalyzedField()
+		{
+			IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true);
+			Document doc = new Document();
+			
+			doc.Add(new Field("preanalyzed", new AnonymousClassTokenStream(this), TermVector.NO));
+			
+			writer.AddDocument(doc);
+			writer.Flush();
+			SegmentInfo info = writer.NewestSegment();
+			writer.Close();
+			SegmentReader reader = SegmentReader.Get(info);
+			
+			TermPositions termPositions = reader.TermPositions(new Term("preanalyzed", "term1"));
+			Assert.IsTrue(termPositions.Next());
+			Assert.AreEqual(1, termPositions.Freq());
+			Assert.AreEqual(0, termPositions.NextPosition());
+			
+			termPositions.Seek(new Term("preanalyzed", "term2"));
+			Assert.IsTrue(termPositions.Next());
+			Assert.AreEqual(2, termPositions.Freq());
+			Assert.AreEqual(1, termPositions.NextPosition());
+			Assert.AreEqual(3, termPositions.NextPosition());
+			
+			termPositions.Seek(new Term("preanalyzed", "term3"));
+			Assert.IsTrue(termPositions.Next());
+			Assert.AreEqual(1, termPositions.Freq());
+			Assert.AreEqual(2, termPositions.NextPosition());
+		}
+		
+		/// <summary> Test adding two fields with the same name, but 
+		/// with different term vector settings (LUCENE-766).
+		/// </summary>
+		[Test]
+		public virtual void  TestMixedTermVectorSettingsSameField()
+		{
+			Document doc = new Document();
+			// f1 first without tv then with tv
+			doc.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.UN_TOKENIZED, TermVector.NO));
+			doc.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.UN_TOKENIZED, TermVector.WITH_POSITIONS_OFFSETS));
+			// f2 first with tv then without tv
+			doc.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.UN_TOKENIZED, TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.UN_TOKENIZED, TermVector.NO));
+			
+			RAMDirectory ram = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(ram, new StandardAnalyzer(), true);
+			writer.AddDocument(doc);
+			writer.Close();
+			
+			IndexReader reader = IndexReader.Open(ram);
+			// f1
+			TermFreqVector tfv1 = reader.GetTermFreqVector(0, "f1");
+			Assert.IsNotNull(tfv1);
+			Assert.AreEqual(2, tfv1.GetTerms().Length, "the 'with_tv' setting should rule!");
+			// f2
+			TermFreqVector tfv2 = reader.GetTermFreqVector(0, "f2");
+			Assert.IsNotNull(tfv2);
+			Assert.AreEqual(2, tfv2.GetTerms().Length, "the 'with_tv' setting should rule!");
+		}
 	}
 }
\ No newline at end of file
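
Note: the buffered-token filter added to TestDocumentWriter above shows
the standard way to inject a "synonym" token: clone the current token,
zero its position increment so it stacks on the same position, and emit
the clone on the next call.  Stripped of the test's payload bookkeeping,
the pattern is roughly the following (class name illustrative; same 2.x
Token/TokenFilter API as the diff):

    // Stacks a single-character synonym "b" on every token's position.
    class SynonymSketchFilter : Lucene.Net.Analysis.TokenFilter
    {
        private Lucene.Net.Analysis.Token buffered;

        public SynonymSketchFilter(Lucene.Net.Analysis.TokenStream input) : base(input)
        {
        }

        public override Lucene.Net.Analysis.Token Next(Lucene.Net.Analysis.Token result)
        {
            if (buffered != null)
            {
                Lucene.Net.Analysis.Token t = buffered;  // emit the pending synonym
                buffered = null;
                return t;
            }
            Lucene.Net.Analysis.Token t2 = input.Next(result);
            if (t2 == null)
                return null;
            buffered = (Lucene.Net.Analysis.Token) t2.Clone();
            buffered.SetPositionIncrement(0);            // same position as t2
            buffered.SetTermBuffer(new char[]{'b'}, 0, 1);
            return t2;
        }
    }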

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFieldInfos.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs Tue Jul 15 14:44:04 2008
@@ -20,8 +20,9 @@
 using NUnit.Framework;
 
 using Document = Lucene.Net.Documents.Document;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using IndexOutput = Lucene.Net.Store.IndexOutput;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
@@ -29,28 +30,23 @@
 	
 	//import org.cnlp.utils.properties.ResourceBundleHelper;
 	[TestFixture]
-	public class TestFieldInfos
+	public class TestFieldInfos : LuceneTestCase
 	{
 		
 		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
 		
-        // public TestFieldInfos(System.String s)
-        // {
-        // }
+		// public TestFieldInfos(System.String s)
+		// {
+		// }
 		
-        [SetUp]
-        public virtual void  SetUp()
+		[SetUp]
+		public override void SetUp()
 		{
 			DocHelper.SetupDoc(testDoc);
 		}
 		
-		[TearDown]
-        public virtual void  TearDown()
-		{
-		}
-		
 		[Test]
-        public virtual void  Test()
+		public virtual void  Test()
 		{
 			//Positive test of FieldInfos
 			Assert.IsTrue(testDoc != null);
@@ -93,7 +89,7 @@
 				
 				dir.Close();
 			}
-			catch (System.IO.IOException e)
+			catch (System.IO.IOException)
 			{
 				Assert.IsTrue(false);
 			}
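
Note: the last hunk drops the unused exception variable, the idiomatic
fix for compiler warning CS0168 when the caught exception is never read.
Where a catch block exists only to mark the test as failed, Assert.Fail
with a message is a further small improvement over Assert.IsTrue(false);
a suggested sketch (not part of this commit):

    try
    {
        dir.Close();
    }
    catch (System.IO.IOException)  // no variable: the exception is never read
    {
        Assert.Fail("unexpected IOException while closing the directory");
    }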