Posted to commits@lucenenet.apache.org by cc...@apache.org on 2011/11/16 06:24:28 UTC

[Lucene.Net] svn commit: r1202532 [1/3] - in /incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk: src/core/Analysis/ src/core/Index/ src/core/Search/ src/core/Store/ src/demo/Demo.Common/ test/core/ test/core/Index/ test/core/QueryParser/ test/core/Search/ test/c...

Author: ccurrens
Date: Wed Nov 16 05:24:27 2011
New Revision: 1202532

URL: http://svn.apache.org/viewvc?rev=1202532&view=rev
Log:
Partial port of 3.0.3 now builds; many tests hang or fail
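
The recurring change across the diffs below is the move off the deprecated 2.x IndexWriter constructors (which took an autoCommit flag) onto the 3.0-style constructors taking an explicit IndexWriter.MaxFieldLength, together with explicit read-only flags on IndexReader.Open and IndexSearcher. A minimal before/after sketch of the pattern (an illustrative fragment, not code from this commit; dir stands for any Lucene.Net.Store.Directory):

    // 2.x style being removed: autoCommit flag, no MaxFieldLength
    //   IndexWriter w = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
    //   IndexReader r = IndexReader.Open(dir);

    // 3.0.3 style used throughout this commit
    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
    IndexReader r = IndexReader.Open(dir, true);    // true = open read-only
    IndexSearcher s = new IndexSearcher(dir, true); // likewise a read-only searcher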

Modified:
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/CharArraySet.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexReader.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/CachingWrapperFilter.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Store/Directory.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/demo/Demo.Common/Demo.Common.csproj
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterDelete.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterLockRelease.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMergePolicy.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMerging.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterReader.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestLazyBug.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiLevelSkipList.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiReader.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestNorms.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestPayloads.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentMerger.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentReader.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestStressIndexing2.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestTermVectorsReader.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestThreadedOptimize.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Lucene.Net.Test.csproj
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiAnalyzer.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/QueryParser/TestQueryParser.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMatchAllDocsQuery.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiPhraseQuery.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcher.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiSearcherRanking.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestMultiTermConstantScore.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestNot.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPhraseQuery.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestPositionIncrement.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestSimpleExplanations.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTermRangeFilter.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTermVectors.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestThreadSafe.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTimeLimitingCollector.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Search/TestTopScoreDocCollector.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Store/TestFileSwitchDirectory.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/TestMergeSchedulerExternal.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/TestSearch.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/TestSearchForDuplicates.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/TestSupportClass.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/UpdatedTests.txt

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/CharArraySet.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/CharArraySet.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/CharArraySet.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/CharArraySet.cs Wed Nov 16 05:24:27 2011
@@ -69,6 +69,12 @@ namespace Lucene.Net.Analysis
             Init(startSize, ignoreCase);
         }
 
+        public CharArraySet(IEnumerable<string> c, bool ignoreCase)
+        {
+            Init(c.Count(), ignoreCase);
+            AddItems(c);
+        }
+
         /// <summary>Create set from a Collection of char[] or String </summary>
         public CharArraySet(IEnumerable<object> c, bool ignoreCase)
         {
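
The CharArraySet hunk above adds a string-typed counterpart to the existing IEnumerable<object> constructor. A minimal usage sketch (the stopword list is hypothetical, not from the diff):

    IEnumerable<string> stopWords = new[] { "the", "a", "an" };
    CharArraySet stopSet = new CharArraySet(stopWords, true); // true = ignore case

One thing to watch in the new overload: Init(c.Count(), ignoreCase) enumerates c once to size the set (Count() is LINQ and only short-circuits for ICollection<T>), and AddItems(c) enumerates it again, so a non-repeatable enumerator would be exhausted before the adds; materialized collections such as arrays and lists are safe.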

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexReader.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/IndexReader.cs Wed Nov 16 05:24:27 2011
@@ -126,7 +126,7 @@ namespace Lucene.Net.Index
 		
 		private int refCount;
 		
-		internal static int DEFAULT_TERMS_INDEX_DIVISOR = 1;
+		protected internal static int DEFAULT_TERMS_INDEX_DIVISOR = 1;
 		
 		/// <summary>Expert: returns the current refCount for this reader </summary>
 		public virtual int GetRefCount()
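
Widening DEFAULT_TERMS_INDEX_DIVISOR from internal to protected internal exposes it to IndexReader subclasses outside the core assembly (in C#, protected internal grants access to derived types or same-assembly code); the same widening is applied to the filter field in CachingWrapperFilter below, presumably for the same reason.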

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/CachingWrapperFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/CachingWrapperFilter.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/CachingWrapperFilter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Search/CachingWrapperFilter.cs Wed Nov 16 05:24:27 2011
@@ -32,7 +32,7 @@ namespace Lucene.Net.Search
 	[Serializable]
 	public class CachingWrapperFilter:Filter
 	{
-		internal Filter filter;
+		protected internal Filter filter;
 
         ///
         /// Expert: Specifies how new deletions against a reopened

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Store/Directory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Store/Directory.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Store/Directory.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Store/Directory.cs Wed Nov 16 05:24:27 2011
@@ -135,7 +135,7 @@ namespace Lucene.Net.Store
 		/// </param>
 		public virtual void  SetLockFactory(LockFactory lockFactory)
 		{
-		    System.Diagnostics.Debug.Assert(this.lockFactory != null);
+		    System.Diagnostics.Debug.Assert(lockFactory != null);
 			this.lockFactory = lockFactory;
 			lockFactory.SetLockPrefix(this.GetLockID());
 		}
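
The Directory fix corrects an inverted precondition: the old code asserted this.lockFactory (the field, which may still be null before the first assignment) rather than the incoming lockFactory argument, so the debug check never validated the value actually being set.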

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/demo/Demo.Common/Demo.Common.csproj
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/demo/Demo.Common/Demo.Common.csproj?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/demo/Demo.Common/Demo.Common.csproj (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/demo/Demo.Common/Demo.Common.csproj Wed Nov 16 05:24:27 2011
@@ -19,7 +19,6 @@
  under the License.
 
 -->
-
 <Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
   <PropertyGroup>
     <ProjectType>Local</ProjectType>
@@ -33,8 +32,7 @@
     <AssemblyKeyContainerName>
     </AssemblyKeyContainerName>
     <AssemblyName>Lucene.Net.Demo.Common</AssemblyName>
-    <AssemblyOriginatorKeyFile>
-    </AssemblyOriginatorKeyFile>
+    <AssemblyOriginatorKeyFile>Lucene.Net.snk</AssemblyOriginatorKeyFile>
     <DefaultClientScript>JScript</DefaultClientScript>
     <DefaultHTMLPageLayout>Grid</DefaultHTMLPageLayout>
     <DefaultTargetSchema>IE50</DefaultTargetSchema>
@@ -48,7 +46,7 @@
     </FileUpgradeFlags>
     <UpgradeBackupLocation>
     </UpgradeBackupLocation>
-    <SignAssembly>false</SignAssembly>
+    <SignAssembly>true</SignAssembly>
     <TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
     <OldToolsVersion>2.0</OldToolsVersion>
     <IsWebBootstrapper>false</IsWebBootstrapper>
@@ -165,6 +163,7 @@
       <SubType>Code</SubType>
     </Compile>
     <None Include="HTML\HTMLParser.jj" />
+    <None Include="Lucene.Net.snk" />
   </ItemGroup>
   <ItemGroup>
     <ProjectReference Include="..\..\core\Lucene.Net.csproj">
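
The Demo.Common.csproj changes enable strong-name signing for the demo assembly (SignAssembly flipped to true, keyed with Lucene.Net.snk) and add the key file to the project items, presumably to match the signing of the core Lucene.Net assembly.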

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterDelete.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterDelete.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterDelete.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterDelete.cs Wed Nov 16 05:24:27 2011
@@ -135,311 +135,284 @@ namespace Lucene.Net.Index
 		}
 		
 		// test the simple case
-		[Test]
-		public virtual void  TestSimpleCase()
-		{
-			System.String[] keywords = new System.String[]{"1", "2"};
-			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
-			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
-			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetUseCompoundFile(true);
-				modifier.SetMaxBufferedDeleteTerms(1);
-				
-				for (int i = 0; i < keywords.Length; i++)
-				{
-					Document doc = new Document();
-					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
-					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
-					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
-					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
-					modifier.AddDocument(doc);
-				}
-				modifier.Optimize();
-				modifier.Commit();
-				
-				Term term = new Term("city", "Amsterdam");
-				int hitCount = GetHitCount(dir, term);
-				Assert.AreEqual(1, hitCount);
-				modifier.DeleteDocuments(term);
-				modifier.Commit();
-				hitCount = GetHitCount(dir, term);
-				Assert.AreEqual(0, hitCount);
-				
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		// test when delete terms only apply to disk segments
-		[Test]
-		public virtual void  TestNonRAMDelete()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetMaxBufferedDocs(2);
-				modifier.SetMaxBufferedDeleteTerms(2);
-				
-				int id = 0;
-				int value_Renamed = 100;
-				
-				for (int i = 0; i < 7; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.Commit();
-				
-				Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
-				Assert.IsTrue(0 < modifier.GetSegmentCount());
-				
-				modifier.Commit();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				Assert.AreEqual(7, reader.NumDocs());
-				reader.Close();
-				
-				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-				
-				modifier.Commit();
-				
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(0, reader.NumDocs());
-				reader.Close();
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		[Test]
-		public virtual void  TestMaxBufferedDeletes()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				Directory dir = new MockRAMDirectory();
-				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				writer.SetMaxBufferedDeleteTerms(1);
-				writer.DeleteDocuments(new Term("foobar", "1"));
-				writer.DeleteDocuments(new Term("foobar", "1"));
-				writer.DeleteDocuments(new Term("foobar", "1"));
-				Assert.AreEqual(3, writer.GetFlushDeletesCount());
-				writer.Close();
-				dir.Close();
-			}
-		}
-		
-		// test when delete terms only apply to ram segments
+        [Test]
+        public virtual void TestSimpleCase()
+        {
+            System.String[] keywords = new System.String[] {"1", "2"};
+            System.String[] unindexed = new System.String[] {"Netherlands", "Italy"};
+            System.String[] unstored = new System.String[]
+                                           {"Amsterdam has lots of bridges", "Venice has lots of canals"};
+            System.String[] text = new System.String[] {"Amsterdam", "Venice"};
+
+            Directory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir,  new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+            modifier.SetUseCompoundFile(true);
+            modifier.SetMaxBufferedDeleteTerms(1);
+
+            for (int i = 0; i < keywords.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
+                doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+                doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
+                doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
+                modifier.AddDocument(doc);
+            }
+            modifier.Optimize();
+            modifier.Commit();
+
+            Term term = new Term("city", "Amsterdam");
+            int hitCount = GetHitCount(dir, term);
+            Assert.AreEqual(1, hitCount);
+            modifier.DeleteDocuments(term);
+            modifier.Commit();
+            hitCount = GetHitCount(dir, term);
+            Assert.AreEqual(0, hitCount);
+
+            modifier.Close();
+            dir.Close();
+        }
+
+        // test when delete terms only apply to disk segments
+        [Test]
+        public virtual void TestNonRAMDelete()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
+                                                   IndexWriter.MaxFieldLength.UNLIMITED);
+            modifier.SetMaxBufferedDocs(2);
+            modifier.SetMaxBufferedDeleteTerms(2);
+
+            int id = 0;
+            int value_Renamed = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value_Renamed);
+            }
+            modifier.Commit();
+
+            Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
+            Assert.IsTrue(0 < modifier.GetSegmentCount());
+
+            modifier.Commit();
+
+            IndexReader reader = IndexReader.Open(dir, true);
+            Assert.AreEqual(7, reader.NumDocs());
+            reader.Close();
+
+            modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+
+            modifier.Commit();
+
+            reader = IndexReader.Open(dir, true);
+            Assert.AreEqual(0, reader.NumDocs());
+            reader.Close();
+            modifier.Close();
+            dir.Close();
+        }
+
+        [Test]
+        public virtual void TestMaxBufferedDeletes()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
+                                                 IndexWriter.MaxFieldLength.UNLIMITED);
+            writer.SetMaxBufferedDeleteTerms(1);
+            writer.DeleteDocuments(new Term("foobar", "1"));
+            writer.DeleteDocuments(new Term("foobar", "1"));
+            writer.DeleteDocuments(new Term("foobar", "1"));
+            Assert.AreEqual(3, writer.GetFlushDeletesCount());
+            writer.Close();
+            dir.Close();
+        }
+
+        // test when delete terms only apply to ram segments
 		[Test]
 		public virtual void  TestRAMDeletes()
 		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				for (int t = 0; t < 2; t++)
-				{
-					bool autoCommit = (0 == pass);
-					Directory dir = new MockRAMDirectory();
-					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-					modifier.SetMaxBufferedDocs(4);
-					modifier.SetMaxBufferedDeleteTerms(4);
-					
-					int id = 0;
-					int value_Renamed = 100;
-					
-					AddDoc(modifier, ++id, value_Renamed);
-					if (0 == t)
-						modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-					else
-						modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
-					AddDoc(modifier, ++id, value_Renamed);
-					if (0 == t)
-					{
-						modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-						Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
-						Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
-					}
-					else
-						modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
-					
-					AddDoc(modifier, ++id, value_Renamed);
-					Assert.AreEqual(0, modifier.GetSegmentCount());
-					modifier.Flush();
-					
-					modifier.Commit();
-					
-					IndexReader reader = IndexReader.Open(dir);
-					Assert.AreEqual(1, reader.NumDocs());
-					
-					int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
-					Assert.AreEqual(1, hitCount);
-					reader.Close();
-					modifier.Close();
-					dir.Close();
-				}
-			}
+            for (int t = 0; t < 2; t++)
+            {
+                Directory dir = new MockRAMDirectory();
+                IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+                modifier.SetMaxBufferedDocs(4);
+                modifier.SetMaxBufferedDeleteTerms(4);
+
+                int id = 0;
+                int value_Renamed = 100;
+
+                AddDoc(modifier, ++id, value_Renamed);
+                if (0 == t)
+                    modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+                else
+                    modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
+                AddDoc(modifier, ++id, value_Renamed);
+                if (0 == t)
+                {
+                    modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+                    Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
+                    Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
+                }
+                else
+                    modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
+
+                AddDoc(modifier, ++id, value_Renamed);
+                Assert.AreEqual(0, modifier.GetSegmentCount());
+                modifier.Commit();
+
+                modifier.Commit();
+
+                IndexReader reader = IndexReader.Open(dir, true);
+                Assert.AreEqual(1, reader.NumDocs());
+
+                int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
+                Assert.AreEqual(1, hitCount);
+                reader.Close();
+                modifier.Close();
+                dir.Close();
+            }
 		}
 		
 		// test when delete terms apply to both disk and ram segments
-		[Test]
-		public virtual void  TestBothDeletes()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetMaxBufferedDocs(100);
-				modifier.SetMaxBufferedDeleteTerms(100);
-				
-				int id = 0;
-				int value_Renamed = 100;
-				
-				for (int i = 0; i < 5; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				
-				value_Renamed = 200;
-				for (int i = 0; i < 5; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.Commit();
-				
-				for (int i = 0; i < 5; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-				
-				modifier.Commit();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				Assert.AreEqual(5, reader.NumDocs());
-				modifier.Close();
-			}
-		}
-		
-		// test that batched delete terms are flushed together
-		[Test]
-		public virtual void  TestBatchDeletes()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetMaxBufferedDocs(2);
-				modifier.SetMaxBufferedDeleteTerms(2);
-				
-				int id = 0;
-				int value_Renamed = 100;
-				
-				for (int i = 0; i < 7; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.Commit();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				Assert.AreEqual(7, reader.NumDocs());
-				reader.Close();
-				
-				id = 0;
-				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
-				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
-				
-				modifier.Commit();
-				
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(5, reader.NumDocs());
-				reader.Close();
-				
-				Term[] terms = new Term[3];
-				for (int i = 0; i < terms.Length; i++)
-				{
-					terms[i] = new Term("id", System.Convert.ToString(++id));
-				}
-				modifier.DeleteDocuments(terms);
-				modifier.Commit();
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(2, reader.NumDocs());
-				reader.Close();
-				
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		// test deleteAll()
+        [Test]
+        public virtual void TestBothDeletes()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+            modifier.SetMaxBufferedDocs(100);
+            modifier.SetMaxBufferedDeleteTerms(100);
+
+            int id = 0;
+            int value_Renamed = 100;
+
+            for (int i = 0; i < 5; i++)
+            {
+                AddDoc(modifier, ++id, value_Renamed);
+            }
+
+            value_Renamed = 200;
+            for (int i = 0; i < 5; i++)
+            {
+                AddDoc(modifier, ++id, value_Renamed);
+            }
+            modifier.Commit();
+
+            for (int i = 0; i < 5; i++)
+            {
+                AddDoc(modifier, ++id, value_Renamed);
+            }
+            modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+
+            modifier.Commit();
+
+            IndexReader reader = IndexReader.Open(dir, true);
+            Assert.AreEqual(5, reader.NumDocs());
+            modifier.Close();
+        }
+
+        // test that batched delete terms are flushed together
+        [Test]
+        public virtual void TestBatchDeletes()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
+                                                   IndexWriter.MaxFieldLength.UNLIMITED);
+            modifier.SetMaxBufferedDocs(2);
+            modifier.SetMaxBufferedDeleteTerms(2);
+
+            int id = 0;
+            int value_Renamed = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value_Renamed);
+            }
+            modifier.Commit();
+
+            IndexReader reader = IndexReader.Open(dir, true);
+            Assert.AreEqual(7, reader.NumDocs());
+            reader.Close();
+
+            id = 0;
+            modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
+            modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
+
+            modifier.Commit();
+
+            reader = IndexReader.Open(dir, true);
+            Assert.AreEqual(5, reader.NumDocs());
+            reader.Close();
+
+            Term[] terms = new Term[3];
+            for (int i = 0; i < terms.Length; i++)
+            {
+                terms[i] = new Term("id", System.Convert.ToString(++id));
+            }
+            modifier.DeleteDocuments(terms);
+            modifier.Commit();
+            reader = IndexReader.Open(dir, true);
+            Assert.AreEqual(2, reader.NumDocs());
+            reader.Close();
+
+            modifier.Close();
+            dir.Close();
+        }
+
+        // test deleteAll()
 		[Test]
 		public virtual void  TestDeleteAll()
 		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetMaxBufferedDocs(2);
-				modifier.SetMaxBufferedDeleteTerms(2);
-				
-				int id = 0;
-				int value_Renamed = 100;
-				
-				for (int i = 0; i < 7; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.Commit();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				Assert.AreEqual(7, reader.NumDocs());
-				reader.Close();
-				
-				// Add 1 doc (so we will have something buffered)
-				AddDoc(modifier, 99, value_Renamed);
-				
-				// Delete all
-				modifier.DeleteAll();
-				
-				// Delete all shouldn't be on disk yet
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(7, reader.NumDocs());
-				reader.Close();
-				
-				// Add a doc and update a doc (after the deleteAll, before the commit)
-				AddDoc(modifier, 101, value_Renamed);
-				UpdateDoc(modifier, 102, value_Renamed);
-				
-				// commit the delete all
-				modifier.Commit();
-				
-				// Validate there are no docs left
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(2, reader.NumDocs());
-				reader.Close();
-				
-				modifier.Close();
-				dir.Close();
-			}
+		    Directory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+		    modifier.SetMaxBufferedDocs(2);
+		    modifier.SetMaxBufferedDeleteTerms(2);
+
+		    int id = 0;
+		    int value_Renamed = 100;
+
+		    for (int i = 0; i < 7; i++)
+		    {
+		        AddDoc(modifier, ++id, value_Renamed);
+		    }
+		    modifier.Commit();
+
+		    IndexReader reader = IndexReader.Open(dir, true);
+		    Assert.AreEqual(7, reader.NumDocs());
+		    reader.Close();
+
+		    // Add 1 doc (so we will have something buffered)
+		    AddDoc(modifier, 99, value_Renamed);
+
+		    // Delete all
+		    modifier.DeleteAll();
+
+		    // Delete all shouldn't be on disk yet
+		    reader = IndexReader.Open(dir, true);
+		    Assert.AreEqual(7, reader.NumDocs());
+		    reader.Close();
+
+		    // Add a doc and update a doc (after the deleteAll, before the commit)
+		    AddDoc(modifier, 101, value_Renamed);
+		    UpdateDoc(modifier, 102, value_Renamed);
+
+		    // commit the delete all
+		    modifier.Commit();
+
+		    // Validate there are no docs left
+		    reader = IndexReader.Open(dir, true);
+		    Assert.AreEqual(2, reader.NumDocs());
+		    reader.Close();
+
+		    modifier.Close();
+		    dir.Close();
 		}
-		
-		// test rollback of deleteAll()
+
+        // test rollback of deleteAll()
 		[Test]
 		public virtual void  TestDeleteAllRollback()
 		{
 			Directory dir = new MockRAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
+            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 			modifier.SetMaxBufferedDocs(2);
 			modifier.SetMaxBufferedDeleteTerms(2);
 			
@@ -453,8 +426,8 @@ namespace Lucene.Net.Index
 			modifier.Commit();
 			
 			AddDoc(modifier, ++id, value_Renamed);
-			
-			IndexReader reader = IndexReader.Open(dir);
+
+		    IndexReader reader = IndexReader.Open(dir, true);
 			Assert.AreEqual(7, reader.NumDocs());
 			reader.Close();
 			
@@ -466,7 +439,7 @@ namespace Lucene.Net.Index
 			modifier.Close();
 			
 			// Validate that the docs are still there
-			reader = IndexReader.Open(dir);
+		    reader = IndexReader.Open(dir, true);
 			Assert.AreEqual(7, reader.NumDocs());
 			reader.Close();
 			
@@ -479,7 +452,7 @@ namespace Lucene.Net.Index
 		public virtual void  TestDeleteAllNRT()
 		{
 			Directory dir = new MockRAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
+            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 			modifier.SetMaxBufferedDocs(2);
 			modifier.SetMaxBufferedDeleteTerms(2);
 			
@@ -512,7 +485,7 @@ namespace Lucene.Net.Index
 			modifier.Close();
 			
 			// Validate that the docs are still there
-			reader = IndexReader.Open(dir);
+		    reader = IndexReader.Open(dir, true);
 			Assert.AreEqual(7, reader.NumDocs());
 			reader.Close();
 			
@@ -541,7 +514,7 @@ namespace Lucene.Net.Index
 		
 		private int GetHitCount(Directory dir, Term term)
 		{
-			IndexSearcher searcher = new IndexSearcher(dir);
+		    IndexSearcher searcher = new IndexSearcher(dir, true);
 			int hitCount = searcher.Search(new TermQuery(term), null, 1000).TotalHits;
 			searcher.Close();
 			return hitCount;
@@ -562,376 +535,371 @@ namespace Lucene.Net.Index
 		/// <summary> Make sure if modifier tries to commit but hits disk full that modifier
 		/// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
 		/// </summary>
-		private void  TestOperationsOnDiskFull(bool updates)
+        private void TestOperationsOnDiskFull(bool updates)
 		{
-			
-			bool debug = false;
-			Term searchTerm = new Term("content", "aaa");
-			int START_COUNT = 157;
-			int END_COUNT = 144;
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				
-				// First build up a starting index:
-				MockRAMDirectory startDir = new MockRAMDirectory();
-				IndexWriter writer = new IndexWriter(startDir, autoCommit, new WhitespaceAnalyzer(), true);
-				for (int i = 0; i < 157; i++)
-				{
-					Document d = new Document();
-					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
-					d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
-					writer.AddDocument(d);
-				}
-				writer.Close();
-				
-				long diskUsage = startDir.SizeInBytes();
-				long diskFree = diskUsage + 10;
-				
-				System.IO.IOException err = null;
-				
-				bool done = false;
-				
-				// Iterate w/ ever increasing free disk space:
-				while (!done)
-				{
-					MockRAMDirectory dir = new MockRAMDirectory(startDir);
-					dir.SetPreventDoubleWrite(false);
-					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
-					
-					modifier.SetMaxBufferedDocs(1000); // use flush or close
-					modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
-					
-					// For each disk size, first try to commit against
-					// dir that will hit random IOExceptions & disk
-					// full; after, give it infinite disk space & turn
-					// off random IOExceptions & retry w/ same reader:
-					bool success = false;
-					
-					for (int x = 0; x < 2; x++)
-					{
-						
-						double rate = 0.1;
-						double diskRatio = ((double) diskFree) / diskUsage;
-						long thisDiskFree;
-						System.String testName;
-						
-						if (0 == x)
-						{
-							thisDiskFree = diskFree;
-							if (diskRatio >= 2.0)
-							{
-								rate /= 2;
-							}
-							if (diskRatio >= 4.0)
-							{
-								rate /= 2;
-							}
-							if (diskRatio >= 6.0)
-							{
-								rate = 0.0;
-							}
-							if (debug)
-							{
-								System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
-							}
-							testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
-						}
-						else
-						{
-							thisDiskFree = 0;
-							rate = 0.0;
-							if (debug)
-							{
-								System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
-							}
-							testName = "reader re-use after disk full";
-						}
-						
-						dir.SetMaxSizeInBytes(thisDiskFree);
-						dir.SetRandomIOExceptionRate(rate, diskFree);
-						
-						try
-						{
-							if (0 == x)
-							{
-								int docId = 12;
-								for (int i = 0; i < 13; i++)
-								{
-									if (updates)
-									{
-										Document d = new Document();
-										d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
-										d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
-										modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
-									}
-									else
-									{
-										// deletes
-										modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
-										// modifier.setNorm(docId, "contents", (float)2.0);
-									}
-									docId += 12;
-								}
-							}
-							modifier.Close();
-							success = true;
-							if (0 == x)
-							{
-								done = true;
-							}
-						}
-						catch (System.IO.IOException e)
-						{
-							if (debug)
-							{
-								System.Console.Out.WriteLine("  hit IOException: " + e);
-								System.Console.Out.WriteLine(e.StackTrace);
-							}
-							err = e;
-							if (1 == x)
-							{
-								System.Console.Error.WriteLine(e.StackTrace);
-								Assert.Fail(testName + " hit IOException after disk space was freed up");
-							}
-						}
-						
-						// If the close() succeeded, make sure there are
-						// no unreferenced files.
-                        if (success)
-                        {
-                            Lucene.Net.Util._TestUtil.CheckIndex(dir);
-                            TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.close");
-                        }
-						
-						// Finally, verify index is not corrupt, and, if
-						// we succeeded, we see all docs changed, and if
-						// we failed, we see either all docs or no docs
-						// changed (transactional semantics):
-						IndexReader newReader = null;
-						try
-						{
-							newReader = IndexReader.Open(dir);
-						}
-						catch (System.IO.IOException e)
-						{
-							System.Console.Error.WriteLine(e.StackTrace);
-							Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
-						}
-						
-						IndexSearcher searcher = new IndexSearcher(newReader);
-						ScoreDoc[] hits = null;
-						try
-						{
-							hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
-						}
-						catch (System.IO.IOException e)
-						{
-							System.Console.Error.WriteLine(e.StackTrace);
-							Assert.Fail(testName + ": exception when searching: " + e);
-						}
-						int result2 = hits.Length;
-						if (success)
-						{
-							if (x == 0 && result2 != END_COUNT)
-							{
-								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
-							}
-							else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
-							{
-								// It's possible that the first exception was
-								// "recoverable" wrt pending deletes, in which
-								// case the pending deletes are retained and
-								// then re-flushing (with plenty of disk
-								// space) will succeed in flushing the
-								// deletes:
-								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
-							}
-						}
-						else
-						{
-							// On hitting exception we still may have added
-							// all docs:
-							if (result2 != START_COUNT && result2 != END_COUNT)
-							{
-								System.Console.Error.WriteLine(err.StackTrace);
-								Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
-							}
-						}
-						
-						searcher.Close();
-						newReader.Close();
-						
-						if (result2 == END_COUNT)
-						{
-							break;
-						}
-					}
-					
-					dir.Close();
-					
-					// Try again with 10 more bytes of free space:
-					diskFree += 10;
-				}
-			}
+
+		    bool debug = false;
+		    Term searchTerm = new Term("content", "aaa");
+		    int START_COUNT = 157;
+		    int END_COUNT = 144;
+
+		    // First build up a starting index:
+		    MockRAMDirectory startDir = new MockRAMDirectory();
+            IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+		    for (int i = 0; i < 157; i++)
+		    {
+		        Document d = new Document();
+		        d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
+		        d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
+		        writer.AddDocument(d);
+		    }
+		    writer.Close();
+
+		    long diskUsage = startDir.SizeInBytes();
+		    long diskFree = diskUsage + 10;
+
+		    System.IO.IOException err = null;
+
+		    bool done = false;
+
+		    // Iterate w/ ever increasing free disk space:
+		    while (!done)
+		    {
+		        MockRAMDirectory dir = new MockRAMDirectory(startDir);
+		        dir.SetPreventDoubleWrite(false);
+                IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+
+		        modifier.SetMaxBufferedDocs(1000); // use flush or close
+		        modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
+
+		        // For each disk size, first try to commit against
+		        // dir that will hit random IOExceptions & disk
+		        // full; after, give it infinite disk space & turn
+		        // off random IOExceptions & retry w/ same reader:
+		        bool success = false;
+
+		        for (int x = 0; x < 2; x++)
+		        {
+
+		            double rate = 0.1;
+		            double diskRatio = ((double) diskFree)/diskUsage;
+		            long thisDiskFree;
+		            System.String testName;
+
+		            if (0 == x)
+		            {
+		                thisDiskFree = diskFree;
+		                if (diskRatio >= 2.0)
+		                {
+		                    rate /= 2;
+		                }
+		                if (diskRatio >= 4.0)
+		                {
+		                    rate /= 2;
+		                }
+		                if (diskRatio >= 6.0)
+		                {
+		                    rate = 0.0;
+		                }
+		                if (debug)
+		                {
+		                    System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
+		                }
+		                testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
+		            }
+		            else
+		            {
+		                thisDiskFree = 0;
+		                rate = 0.0;
+		                if (debug)
+		                {
+		                    System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
+		                }
+		                testName = "reader re-use after disk full";
+		            }
+
+		            dir.SetMaxSizeInBytes(thisDiskFree);
+		            dir.SetRandomIOExceptionRate(rate, diskFree);
+
+		            try
+		            {
+		                if (0 == x)
+		                {
+		                    int docId = 12;
+		                    for (int i = 0; i < 13; i++)
+		                    {
+		                        if (updates)
+		                        {
+		                            Document d = new Document();
+		                            d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES,
+		                                            Field.Index.NOT_ANALYZED));
+		                            d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
+		                            modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
+		                        }
+		                        else
+		                        {
+		                            // deletes
+		                            modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
+		                            // modifier.setNorm(docId, "contents", (float)2.0);
+		                        }
+		                        docId += 12;
+		                    }
+		                }
+		                modifier.Close();
+		                success = true;
+		                if (0 == x)
+		                {
+		                    done = true;
+		                }
+		            }
+		            catch (System.IO.IOException e)
+		            {
+		                if (debug)
+		                {
+		                    System.Console.Out.WriteLine("  hit IOException: " + e);
+		                    System.Console.Out.WriteLine(e.StackTrace);
+		                }
+		                err = e;
+		                if (1 == x)
+		                {
+		                    System.Console.Error.WriteLine(e.StackTrace);
+		                    Assert.Fail(testName + " hit IOException after disk space was freed up");
+		                }
+		            }
+
+		            // If the close() succeeded, make sure there are
+		            // no unreferenced files.
+		            if (success)
+		            {
+		                Lucene.Net.Util._TestUtil.CheckIndex(dir);
+		                TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.close");
+		            }
+
+		            // Finally, verify index is not corrupt, and, if
+		            // we succeeded, we see all docs changed, and if
+		            // we failed, we see either all docs or no docs
+		            // changed (transactional semantics):
+		            IndexReader newReader = null;
+		            try
+		            {
+		                newReader = IndexReader.Open(dir, true);
+		            }
+		            catch (System.IO.IOException e)
+		            {
+		                System.Console.Error.WriteLine(e.StackTrace);
+		                Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
+		            }
+
+		            IndexSearcher searcher = new IndexSearcher(newReader);
+		            ScoreDoc[] hits = null;
+		            try
+		            {
+		                hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+		            }
+		            catch (System.IO.IOException e)
+		            {
+		                System.Console.Error.WriteLine(e.StackTrace);
+		                Assert.Fail(testName + ": exception when searching: " + e);
+		            }
+		            int result2 = hits.Length;
+		            if (success)
+		            {
+		                if (x == 0 && result2 != END_COUNT)
+		                {
+		                    Assert.Fail(testName +
+		                                ": method did not throw exception but hits.length for search on term 'aaa' is " +
+		                                result2 + " instead of expected " + END_COUNT);
+		                }
+		                else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
+		                {
+		                    // It's possible that the first exception was
+		                    // "recoverable" wrt pending deletes, in which
+		                    // case the pending deletes are retained and
+		                    // then re-flushing (with plenty of disk
+		                    // space) will succeed in flushing the
+		                    // deletes:
+		                    Assert.Fail(testName +
+		                                ": method did not throw exception but hits.length for search on term 'aaa' is " +
+		                                result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
+		                }
+		            }
+		            else
+		            {
+		                // On hitting exception we still may have added
+		                // all docs:
+		                if (result2 != START_COUNT && result2 != END_COUNT)
+		                {
+		                    System.Console.Error.WriteLine(err.StackTrace);
+		                    Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " +
+		                                result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
+		                }
+		            }
+
+		            searcher.Close();
+		            newReader.Close();
+
+		            if (result2 == END_COUNT)
+		            {
+		                break;
+		            }
+		        }
+
+		        dir.Close();
+
+		        // Try again with 10 more bytes of free space:
+		        diskFree += 10;
+		    }
 		}
-		
-		// This test tests that buffered deletes are cleared when
+
+        // This test tests that buffered deletes are cleared when
 		// an Exception is hit during flush.
-		[Test]
-		public virtual void  TestErrorAfterApplyDeletes()
-		{
-			
-			MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);
-			
-			// create a couple of files
-			
-			System.String[] keywords = new System.String[]{"1", "2"};
-			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
-			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
-			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				MockRAMDirectory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetUseCompoundFile(true);
-				modifier.SetMaxBufferedDeleteTerms(2);
-				
-				dir.FailOn(failure.Reset());
-				
-				for (int i = 0; i < keywords.Length; i++)
-				{
-					Document doc = new Document();
-					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
-					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
-					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
-					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
-					modifier.AddDocument(doc);
-				}
-				// flush (and commit if ac)
-				
-				modifier.Optimize();
-				modifier.Commit();
-				
-				// one of the two files hits
-				
-				Term term = new Term("city", "Amsterdam");
-				int hitCount = GetHitCount(dir, term);
-				Assert.AreEqual(1, hitCount);
-				
-				// open the writer again (closed above)
-				
-				// delete the doc
-				// max buf del terms is two, so this is buffered
-				
-				modifier.DeleteDocuments(term);
-				
-				// add a doc (needed for the !ac case; see below)
-				// doc remains buffered
-				
-				Document doc2 = new Document();
-				modifier.AddDocument(doc2);
-				
-				// commit the changes, the buffered deletes, and the new doc
-				
-				// The failure object will fail on the first write after the del
-				// file gets created when processing the buffered delete
-				
-				// in the ac case, this will be when writing the new segments
-				// files so we really don't need the new doc, but it's harmless
-				
-				// in the !ac case, a new segments file won't be created but in
-				// this case, creation of the cfs file happens next so we need
-				// the doc (to test that it's okay that we don't lose deletes if
-				// failing while creating the cfs file)
-				
-				bool failed = false;
-				try
-				{
-					modifier.Commit();
-				}
-				catch (System.IO.IOException ioe)
-				{
-					failed = true;
-				}
-				
-				Assert.IsTrue(failed);
-				
-				// The commit above failed, so we need to retry it (which will
-				// succeed, because the failure is a one-shot)
-				
-				modifier.Commit();
-				
-				hitCount = GetHitCount(dir, term);
-				
-				// Make sure the delete was successfully flushed:
-				Assert.AreEqual(0, hitCount);
-				
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		// This test tests that the files created by the docs writer before
+        [Test]
+        public virtual void TestErrorAfterApplyDeletes()
+        {
+            MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);
+
+            // create a couple of files
+
+            System.String[] keywords = new System.String[] {"1", "2"};
+            System.String[] unindexed = new System.String[] {"Netherlands", "Italy"};
+            System.String[] unstored = new System.String[]
+                                           {"Amsterdam has lots of bridges", "Venice has lots of canals"};
+            System.String[] text = new System.String[] {"Amsterdam", "Venice"};
+
+            MockRAMDirectory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+            modifier.SetUseCompoundFile(true);
+            modifier.SetMaxBufferedDeleteTerms(2);
+
+            dir.FailOn(failure.Reset());
+
+            for (int i = 0; i < keywords.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
+                doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+                doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
+                doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
+                modifier.AddDocument(doc);
+            }
+            // flush (and commit if ac)
+
+            modifier.Optimize();
+            modifier.Commit();
+
+            // one of the two files hits
+
+            Term term = new Term("city", "Amsterdam");
+            int hitCount = GetHitCount(dir, term);
+            Assert.AreEqual(1, hitCount);
+
+            // open the writer again (closed above)
+
+            // delete the doc
+            // max buf del terms is two, so this is buffered
+
+            modifier.DeleteDocuments(term);
+
+            // add a doc (needed for the !ac case; see below)
+            // doc remains buffered
+
+            Document doc2 = new Document();
+            modifier.AddDocument(doc2);
+
+            // commit the changes, the buffered deletes, and the new doc
+
+            // The failure object will fail on the first write after the del
+            // file gets created when processing the buffered delete
+
+            // in the ac case, this will be when writing the new segments
+            // files so we really don't need the new doc, but it's harmless
+
+            // in the !ac case, a new segments file won't be created but in
+            // this case, creation of the cfs file happens next so we need
+            // the doc (to test that it's okay that we don't lose deletes if
+            // failing while creating the cfs file)
+
+            bool failed = false;
+            try
+            {
+                modifier.Commit();
+            }
+            catch (System.IO.IOException ioe)
+            {
+                failed = true;
+            }
+
+            Assert.IsTrue(failed);
+
+            // The commit above failed, so we need to retry it (which will
+            // succeed, because the failure is a one-shot)
+
+            modifier.Commit();
+
+            hitCount = GetHitCount(dir, term);
+
+            // Make sure the delete was successfully flushed:
+            Assert.AreEqual(0, hitCount);
+
+            modifier.Close();
+            dir.Close();
+        }
+
+        // This test tests that the files created by the docs writer before
 		// a segment is written are cleaned up if there's an i/o error
-		
-		[Test]
-		public virtual void  TestErrorInDocsWriterAdd()
-		{
-			
-			MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
-			
-			// create a couple of files
-			
-			System.String[] keywords = new System.String[]{"1", "2"};
-			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
-			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
-			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				MockRAMDirectory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				
-				dir.FailOn(failure.Reset());
-				
-				for (int i = 0; i < keywords.Length; i++)
-				{
-					Document doc = new Document();
-					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
-					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
-					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
-					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
-					try
-					{
-						modifier.AddDocument(doc);
-					}
-					catch (System.IO.IOException io)
-					{
-						break;
-					}
-				}
-				
-				System.String[] startFiles = dir.ListAll();
-				SegmentInfos infos = new SegmentInfos();
-				infos.Read(dir);
-				new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null,null);
-				System.String[] endFiles = dir.ListAll();
-				
-				if (!CollectionsHelper.CompareStringArrays(startFiles, endFiles))
-				{
-					Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
-				}
-				
-				modifier.Close();
-			}
-		}
-		
-		private System.String ArrayToString(System.String[] l)
+
+        [Test]
+        public virtual void TestErrorInDocsWriterAdd()
+        {
+
+            MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
+
+            // create a couple of files
+
+            System.String[] keywords = new System.String[] {"1", "2"};
+            System.String[] unindexed = new System.String[] {"Netherlands", "Italy"};
+            System.String[] unstored = new System.String[]
+                                           {"Amsterdam has lots of bridges", "Venice has lots of canals"};
+            System.String[] text = new System.String[] {"Amsterdam", "Venice"};
+
+            MockRAMDirectory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+
+            dir.FailOn(failure.Reset());
+
+            for (int i = 0; i < keywords.Length; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
+                doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+                doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
+                doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
+                try
+                {
+                    modifier.AddDocument(doc);
+                }
+                catch (System.IO.IOException)
+                {
+                    break;
+                }
+            }
+
+            System.String[] startFiles = dir.ListAll();
+            SegmentInfos infos = new SegmentInfos();
+            infos.Read(dir);
+            new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null, null);
+            System.String[] endFiles = dir.ListAll();
+
+            if (!CollectionsHelper.CompareStringArrays(startFiles, endFiles))
+            {
+                Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " +
+                            ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
+            }
+
+            modifier.Close();
+        }
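
The start/end ListAll() comparison above is the leak check for an aborted DocumentsWriter: constructing an IndexFileDeleter with KeepOnlyLastCommitDeletionPolicy prunes everything the last commit does not reference, so a clean abort() must leave the two listings identical. The idiom, condensed from the test body above:

    string[] startFiles = dir.ListAll();
    SegmentInfos infos = new SegmentInfos();
    infos.Read(dir);
    // side effect of construction: deletes files unreferenced by the last commit
    new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null, null);
    string[] endFiles = dir.ListAll();
    // any difference means abort() leaked files
    Assert.IsTrue(CollectionsHelper.CompareStringArrays(startFiles, endFiles));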
+
+        private System.String ArrayToString(System.String[] l)
 		{
 			System.String s = "";
 			for (int i = 0; i < l.Length; i++)

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterLockRelease.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterLockRelease.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterLockRelease.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterLockRelease.cs Wed Nov 16 05:24:27 2011
@@ -16,6 +16,7 @@
  */
 
 using System;
+using Lucene.Net.Store;
 using Lucene.Net.Support;
 using NUnit.Framework;
 
@@ -35,7 +36,7 @@ namespace Lucene.Net.Index
     [TestFixture]
 	public class TestIndexWriterLockRelease:LuceneTestCase
 	{
-		private System.IO.FileInfo __test_dir;
+		private System.IO.DirectoryInfo __test_dir;
 		
 		[SetUp]
 		public override void  SetUp()
@@ -44,7 +45,7 @@ namespace Lucene.Net.Index
 			if (this.__test_dir == null)
 			{
 				System.String tmp_dir = AppSettings.Get("java.io.tmpdir", "tmp");
-				this.__test_dir = new System.IO.FileInfo(System.IO.Path.Combine(tmp_dir, "testIndexWriter"));
+				this.__test_dir = new System.IO.DirectoryInfo(System.IO.Path.Combine(tmp_dir, "testIndexWriter"));
 				
 				bool tmpBool;
 				if (System.IO.File.Exists(this.__test_dir.FullName))
@@ -126,21 +127,25 @@ namespace Lucene.Net.Index
 		public virtual void  TestIndexWriterLockRelease_Renamed()
 		{
 			IndexWriter im;
-			
-			try
-			{
-				im = new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
-			}
-			catch (System.IO.FileNotFoundException e)
-			{
-				try
-				{
-					im = new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
-				}
-				catch (System.IO.FileNotFoundException e1)
-				{
-				}
-			}
+		    FSDirectory dir = FSDirectory.Open(this.__test_dir);
+            try
+            {
+                im = new IndexWriter(dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+            }
+            catch (System.IO.FileNotFoundException)
+            {
+                try
+                {
+                    im = new IndexWriter(dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+                }
+                catch (System.IO.FileNotFoundException)
+                {
+                }
+            }
+            finally
+            {
+                dir.Close();
+            }
 		}
 	}
 }
\ No newline at end of file
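
The rewrite above routes construction through FSDirectory.Open and a try/finally so the directory is closed even when the IndexWriter constructor throws. The same shape as a standalone snippet (the temp path is illustrative):

    var path = new System.IO.DirectoryInfo(
        System.IO.Path.Combine(System.IO.Path.GetTempPath(), "testIndexWriter"));
    FSDirectory dir = FSDirectory.Open(path);  // 3.0.x overload taking a DirectoryInfo
    try
    {
        var im = new IndexWriter(dir,
            new Lucene.Net.Analysis.Standard.StandardAnalyzer(Util.Version.LUCENE_CURRENT),
            false,  // open an existing index rather than creating one
            IndexWriter.MaxFieldLength.LIMITED);
        im.Close();
    }
    finally
    {
        dir.Close();  // release the directory handle on every path
    }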

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMergePolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMergePolicy.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMergePolicy.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMergePolicy.cs Wed Nov 16 05:24:27 2011
@@ -145,7 +145,7 @@ namespace Lucene.Net.Index
 		{
 			Directory dir = new RAMDirectory();
 			
-			IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.SetMaxBufferedDocs(101);
 			writer.SetMergeFactor(101);
 			writer.SetMergePolicy(new LogDocMergePolicy(writer));
@@ -161,7 +161,7 @@ namespace Lucene.Net.Index
 				}
 				writer.Close();
 				
-				writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), false);
+				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetMaxBufferedDocs(101);
 				writer.SetMergeFactor(101);
 				writer.SetMergePolicy(new LogDocMergePolicy(writer));
@@ -182,6 +182,9 @@ namespace Lucene.Net.Index
 			{
 				AddDoc(writer);
 			}
+		    writer.Commit();
+		    ((ConcurrentMergeScheduler) writer.GetMergeScheduler()).Sync();
+		    writer.Commit();
 			CheckInvariants(writer);
 			
 			writer.Close();
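
The Commit/Sync/Commit sequence inserted here is what makes the invariant check deterministic: ConcurrentMergeScheduler runs merges on background threads, so without a sync the segment counts that CheckInvariants() inspects may still be changing. Spelled out:

    writer.Commit();  // flush buffered docs so pending merges get scheduled
    ((ConcurrentMergeScheduler) writer.GetMergeScheduler()).Sync();  // wait for background merges
    writer.Commit();  // publish the post-merge segment structure
    CheckInvariants(writer);  // segment counts are stable from here on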
@@ -193,7 +196,7 @@ namespace Lucene.Net.Index
 		{
 			Directory dir = new RAMDirectory();
 			
-			IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(100);
@@ -204,12 +207,12 @@ namespace Lucene.Net.Index
 				CheckInvariants(writer);
 			}
 			writer.Close();
-			
-			IndexReader reader = IndexReader.Open(dir);
+
+		    IndexReader reader = IndexReader.Open(dir, false);
 			reader.DeleteDocuments(new Term("content", "aaa"));
 			reader.Close();
 			
-			writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), false);
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(5);
@@ -219,6 +222,9 @@ namespace Lucene.Net.Index
 			{
 				AddDoc(writer);
 			}
+		    writer.Commit();
+            ((ConcurrentMergeScheduler)writer.GetMergeScheduler()).Sync();
+		    writer.Commit();
 			CheckInvariants(writer);
 			Assert.AreEqual(10, writer.MaxDoc());
 			
@@ -260,7 +266,7 @@ namespace Lucene.Net.Index
 				{
 					if (upperBound * mergeFactor <= maxMergeDocs)
 					{
-						Assert.IsTrue(numSegments < mergeFactor);
+                        Assert.IsTrue(numSegments < mergeFactor, "maxMergeDocs=" + maxMergeDocs + "; numSegments=" + numSegments + "; upperBound=" + upperBound + "; mergeFactor=" + mergeFactor);
 					}
 					
 					do 

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMerging.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMerging.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMerging.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterMerging.cs Wed Nov 16 05:24:27 2011
@@ -34,7 +34,7 @@ namespace Lucene.Net.Index
 	public class TestIndexWriterMerging:LuceneTestCase
 	{
 		
-		/// <summary> Tests that index merging (specifically addIndexes()) doesn't
+		/// <summary> Tests that index merging (specifically AddIndexesNoOptimize()) doesn't
 		/// change the index order of documents.
 		/// </summary>
 		[Test]
@@ -65,7 +65,8 @@ namespace Lucene.Net.Index
 			IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMergeFactor(2);
 			
-			writer.AddIndexes(new Directory[]{indexA, indexB});
+			writer.AddIndexesNoOptimize(new []{indexA, indexB});
+            writer.Optimize();
 			writer.Close();
 			
 			fail = VerifyIndex(merged, 0);
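
The old AddIndexes(Directory[]) overload optimized as a side effect and is no longer available in the 3.0 API; AddIndexesNoOptimize merges the external indexes in without optimizing, so the port appends an explicit Optimize() to restore the single-segment end state this ordering test expects:

    // 2.x:  writer.AddIndexes(new Directory[] { indexA, indexB });
    // 3.0:
    writer.AddIndexesNoOptimize(new[] { indexA, indexB });  // merge in, no optimize
    writer.Optimize();                                      // collapse to one segment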
@@ -77,7 +78,7 @@ namespace Lucene.Net.Index
 		private bool VerifyIndex(Directory directory, int startAt)
 		{
 			bool fail = false;
-			IndexReader reader = IndexReader.Open(directory);
+			IndexReader reader = IndexReader.Open(directory, true);
 			
 			int max = reader.MaxDoc();
 			for (int i = 0; i < max; i++)

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterReader.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriterReader.cs Wed Nov 16 05:24:27 2011
@@ -216,8 +216,8 @@ namespace Lucene.Net.Index
 			r1.Close();
 			writer.Close();
 			Assert.IsTrue(r2.IsCurrent());
-			
-			IndexReader r3 = IndexReader.Open(dir1);
+
+		    IndexReader r3 = IndexReader.Open(dir1, true);
 			Assert.IsTrue(r3.IsCurrent());
 			Assert.IsTrue(r2.IsCurrent());
 			Assert.AreEqual(0, Count(new Term("id", id10), r3));
@@ -402,8 +402,8 @@ namespace Lucene.Net.Index
 			Assert.IsTrue(addDirThreads.failures.Count == 0);
 			
 			_TestUtil.CheckIndex(mainDir);
-			
-			IndexReader reader = IndexReader.Open(mainDir);
+
+		    IndexReader reader = IndexReader.Open(mainDir, true);
 			Assert.AreEqual(addDirThreads.count.IntValue(), reader.NumDocs());
 			//Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.NUM_THREADS
 			//    * addDirThreads.NUM_INIT_DOCS, reader.numDocs());
@@ -526,7 +526,7 @@ namespace Lucene.Net.Index
 					}
 					catch (System.Threading.ThreadInterruptedException ie)
 					{
-						ThreadClass.Current().Interrupt();
+					    throw;
 					}
 			}
 		}
@@ -626,7 +626,7 @@ namespace Lucene.Net.Index
 				
 				readers = new IndexReader[numDirs];
 				for (int i = 0; i < numDirs; i++)
-					readers[i] = IndexReader.Open(addDir);
+				    readers[i] = IndexReader.Open(addDir, false);
 			}
 			
 			internal virtual void  JoinThreads()
@@ -638,7 +638,7 @@ namespace Lucene.Net.Index
 					}
 					catch (System.Threading.ThreadInterruptedException ie)
 					{
-						ThreadClass.Current().Interrupt();
+					    throw;
 					}
 			}
 			
@@ -680,7 +680,8 @@ namespace Lucene.Net.Index
 				{
 					
 					case 0: 
-						mainWriter.AddIndexes(dirs);
+						mainWriter.AddIndexesNoOptimize(dirs);
+                        mainWriter.Optimize();
 						break;
 					
 					case 1: 
@@ -1157,7 +1158,7 @@ namespace Lucene.Net.Index
 			w.ExpungeDeletes();
 			w.Close();
 			r.Close();
-			r = IndexReader.Open(dir);
+			r = IndexReader.Open(dir, true);
 			Assert.AreEqual(1, r.NumDocs());
 			Assert.IsFalse(r.HasDeletions());
 			r.Close();

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestLazyBug.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestLazyBug.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestLazyBug.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestLazyBug.cs Wed Nov 16 05:24:27 2011
@@ -96,13 +96,13 @@ namespace Lucene.Net.Index
                     dataset.Add(data[i], data[i]);
 
 			Directory dir = MakeIndex();
-			IndexReader reader = IndexReader.Open(dir);
+		    IndexReader reader = IndexReader.Open(dir, true);
 			for (int i = 0; i < docs.Length; i++)
 			{
 				Document d = reader.Document(docs[i], SELECTOR);
 				d.Get(MAGIC_FIELD);
 				
-				System.Collections.IList fields = d.GetFields();
+				var fields = d.GetFields();
 				for (System.Collections.IEnumerator fi = fields.GetEnumerator(); fi.MoveNext(); )
 				{
 					Fieldable f = null;

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiLevelSkipList.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiLevelSkipList.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiLevelSkipList.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiLevelSkipList.cs Wed Nov 16 05:24:27 2011
@@ -57,7 +57,7 @@ namespace Lucene.Net.Index
 				d1.Add(new Field(term.Field(), term.Text(), Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(d1);
 			}
-			writer.Flush();
+			writer.Commit();
 			writer.Optimize();
 			writer.Close();
 			
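
In the 3.0 API, Commit() replaces the old public Flush() and does strictly more: it flushes buffered documents and publishes a new segments file that freshly opened readers can see. That matters here because the test needs a real segment boundary in place before Optimize() runs; the same substitution appears in the TestPayloads hunks below:

    writer.Commit();    // force a segment, visible to new readers
    writer.Optimize();  // exercise merging across the boundary just created
    writer.Close();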

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiReader.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestMultiReader.cs Wed Nov 16 05:24:27 2011
@@ -38,8 +38,8 @@ namespace Lucene.Net.Index
 			IndexReader reader;
 			
 			sis.Read(dir);
-			SegmentReader reader1 = SegmentReader.Get(sis.Info(0));
-			SegmentReader reader2 = SegmentReader.Get(sis.Info(1));
+			SegmentReader reader1 = SegmentReader.Get(false, sis.Info(0), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+            SegmentReader reader2 = SegmentReader.Get(false, sis.Info(1), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			readers[0] = reader1;
 			readers[1] = reader2;
 			Assert.IsTrue(reader1 != null);
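
SegmentReader.Get now takes a readOnly flag and a terms-index divisor. These tests pass IndexReader.DEFAULT_TERMS_INDEX_DIVISOR (1, i.e. load the full terms index) and pick readOnly per test: false where the reader must support DeleteDocument or SetNorm, true otherwise. The call shape:

    SegmentReader reader1 = SegmentReader.Get(
        false,                                     // readOnly? false: the test mutates via the reader
        sis.Info(0),                               // segment to open
        IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);  // 1 = load the whole terms index, no subsampling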

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestNorms.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestNorms.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestNorms.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestNorms.cs Wed Nov 16 05:24:27 2011
@@ -139,7 +139,8 @@ namespace Lucene.Net.Index
 			IndexWriter iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
 			iw.SetMaxBufferedDocs(5);
 			iw.SetMergeFactor(3);
-			iw.AddIndexes(new Directory[]{dir1, dir2});
+			iw.AddIndexesNoOptimize(new Directory[]{dir1, dir2});
+            iw.Optimize();
 			iw.Close();
 			
 			norms1.AddRange(norms);
@@ -192,18 +193,18 @@ namespace Lucene.Net.Index
 		
 		private void  ModifyNormsForF1(Directory dir)
 		{
-			IndexReader ir = IndexReader.Open(dir);
+			IndexReader ir = IndexReader.Open(dir, false);
 			int n = ir.MaxDoc();
 			for (int i = 0; i < n; i += 3)
 			{
 				// modify for every third doc
 				int k = (i * 3) % modifiedNorms.Count;
-				float origNorm = (float) ((System.Single) modifiedNorms[i]);
-				float newNorm = (float) ((System.Single) modifiedNorms[k]);
+				float origNorm = (float)modifiedNorms[i];
+                float newNorm = (float)modifiedNorms[k];
 				//System.out.println("Modifying: for "+i+" from "+origNorm+" to "+newNorm);
 				//System.out.println("      and: for "+k+" from "+newNorm+" to "+origNorm);
-				modifiedNorms[i] = (float) newNorm;
-				modifiedNorms[k] = (float) origNorm;
+				modifiedNorms[i] = newNorm;
+				modifiedNorms[k] = origNorm;
 				ir.SetNorm(i, "f" + 1, newNorm);
 				ir.SetNorm(k, "f" + 1, origNorm);
 			}
@@ -213,7 +214,7 @@ namespace Lucene.Net.Index
 		
 		private void  VerifyIndex(Directory dir)
 		{
-			IndexReader ir = IndexReader.Open(dir);
+		    IndexReader ir = IndexReader.Open(dir, false);
 			for (int i = 0; i < NUM_FIELDS; i++)
 			{
 				System.String field = "f" + i;
@@ -223,7 +224,7 @@ namespace Lucene.Net.Index
 				for (int j = 0; j < b.Length; j++)
 				{
 					float norm = Similarity.DecodeNorm(b[j]);
-					float norm1 = (float) ((System.Single) storedNorms[j]);
+					float norm1 = (float)storedNorms[j];
 					Assert.AreEqual(norm, norm1, 0.000001, "stored norm value of " + field + " for doc " + j + " is " + norm + " - a mismatch!");
 				}
 			}
@@ -273,8 +274,8 @@ namespace Lucene.Net.Index
 				norm += normDelta;
 			}
 			while (true);
-			norms.Insert(numDocNorms, (float) norm);
-			modifiedNorms.Insert(numDocNorms, (float) norm);
+			norms.Insert(numDocNorms, norm);
+			modifiedNorms.Insert(numDocNorms, norm);
 			//System.out.println("creating norm("+numDocNorms+"): "+norm);
 			numDocNorms++;
 			lastNorm = (norm > 10?0:norm); //there's a limit to how many distinct values can be stored in a single byte
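
The "limit" the comment refers to is Lucene's norm encoding: a norm is stored as one byte, so only 256 distinct values survive the encode/decode round trip, and the generator above keeps its values in a range that stays distinguishable. Round-trip sketch:

    float norm = 0.7f;
    byte encoded = DefaultSimilarity.EncodeNorm(norm);  // lossy 8-bit encoding
    float decoded = Similarity.DecodeNorm(encoded);     // one of 256 representable values
    // which is why VerifyIndex compares norms with a small epsilon
    // rather than expecting exact equality with the original float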

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestPayloads.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestPayloads.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestPayloads.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestPayloads.cs Wed Nov 16 05:24:27 2011
@@ -261,7 +261,7 @@ namespace Lucene.Net.Index
 			}
 			
 			// make sure we create more than one segment to test merging
-			writer.Flush();
+			writer.Commit();
 			
 			// now we make sure to have different payload lengths next at the next skip point        
 			for (int i = 0; i < numDocs; i++)
@@ -280,7 +280,7 @@ namespace Lucene.Net.Index
 			* Verify the index
 			* first we test if all payloads are stored correctly
 			*/
-			IndexReader reader = IndexReader.Open(dir);
+		    IndexReader reader = IndexReader.Open(dir, true);
 			
 			byte[] verifyPayloadData = new byte[payloadDataLength];
 			offset = 0;
@@ -388,8 +388,8 @@ namespace Lucene.Net.Index
 			writer.Optimize();
 			// flush
 			writer.Close();
-			
-			reader = IndexReader.Open(dir);
+
+		    reader = IndexReader.Open(dir, true);
 			tp = reader.TermPositions(new Term(fieldName, singleTerm));
 			tp.Next();
 			tp.NextPosition();
@@ -573,7 +573,7 @@ namespace Lucene.Net.Index
 				ingesters[i].Join();
 			}
 			writer.Close();
-			IndexReader reader = IndexReader.Open(dir);
+		    IndexReader reader = IndexReader.Open(dir, true);
 			TermEnum terms = reader.Terms();
 			while (terms.Next())
 			{

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentMerger.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentMerger.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentMerger.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentMerger.cs Wed Nov 16 05:24:27 2011
@@ -59,8 +59,8 @@ namespace Lucene.Net.Index
 			SegmentInfo info1 = DocHelper.WriteDoc(merge1Dir, doc1);
 			DocHelper.SetupDoc(doc2);
 			SegmentInfo info2 = DocHelper.WriteDoc(merge2Dir, doc2);
-			reader1 = SegmentReader.Get(info1);
-			reader2 = SegmentReader.Get(info2);
+			reader1 = SegmentReader.Get(true, info1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+            reader2 = SegmentReader.Get(true, info2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 		}
 
         [TearDown]
@@ -93,7 +93,7 @@ namespace Lucene.Net.Index
 			merger.CloseReaders();
 			Assert.IsTrue(docsMerged == 2);
 			//Should be able to open a new SegmentReader against the new directory
-			SegmentReader mergedReader = SegmentReader.Get(new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true));
+            SegmentReader mergedReader = SegmentReader.Get(true, new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			Assert.IsTrue(mergedReader != null);
 			Assert.IsTrue(mergedReader.NumDocs() == 2);
 			Document newDoc1 = mergedReader.Document(0);
@@ -111,7 +111,7 @@ namespace Lucene.Net.Index
 			System.Collections.Generic.ICollection<string> stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
 			Assert.IsTrue(stored != null);
 			//System.out.println("stored size: " + stored.size());
-			Assert.IsTrue(stored.Count == 4, "We do not have 4 fields that were indexed with term vector");
+			Assert.IsTrue(stored.Count == 3, "We do not have 3 fields that were indexed with term vector");
 			
 			TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
 			Assert.IsTrue(vector != null);

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentReader.cs?rev=1202532&r1=1202531&r2=1202532&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestSegmentReader.cs Wed Nov 16 05:24:27 2011
@@ -50,7 +50,7 @@ namespace Lucene.Net.Index
 			base.SetUp();
 			DocHelper.SetupDoc(testDoc);
 			SegmentInfo info = DocHelper.WriteDoc(dir, testDoc);
-			reader = SegmentReader.Get(info);
+            reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 		}
 
         [TearDown]
@@ -79,10 +79,9 @@ namespace Lucene.Net.Index
 			//There are 2 unstored fields on the document that are not preserved across writing
 			Assert.IsTrue(DocHelper.NumFields(result) == DocHelper.NumFields(testDoc) - DocHelper.unstored.Count);
 			
-			System.Collections.IList fields = result.GetFields();
-			for (System.Collections.IEnumerator iter = fields.GetEnumerator(); iter.MoveNext(); )
+			var fields = result.GetFields();
+            foreach (var field in fields)
 			{
-				Fieldable field = (Fieldable) iter.Current;
 				Assert.IsTrue(field != null);
 				Assert.IsTrue(DocHelper.nameValues.Contains(field.Name()));
 			}
@@ -94,7 +93,7 @@ namespace Lucene.Net.Index
 			Document docToDelete = new Document();
 			DocHelper.SetupDoc(docToDelete);
 			SegmentInfo info = DocHelper.WriteDoc(dir, docToDelete);
-			SegmentReader deleteReader = SegmentReader.Get(info);
+            SegmentReader deleteReader = SegmentReader.Get(false, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			Assert.IsTrue(deleteReader != null);
 			Assert.IsTrue(deleteReader.NumDocs() == 1);
 			deleteReader.DeleteDocument(0);
@@ -199,16 +198,7 @@ namespace Lucene.Net.Index
 						// test for fake norms of 1.0 or null depending on the flag
 						byte[] norms = reader.Norms(f.Name());
 						byte norm1 = DefaultSimilarity.EncodeNorm(1.0f);
-						if (reader.GetDisableFakeNorms())
-							Assert.IsNull(norms);
-						else
-						{
-							Assert.AreEqual(norms.Length, reader.MaxDoc());
-							for (int j = 0; j < reader.MaxDoc(); j++)
-							{
-								Assert.AreEqual(norms[j], norm1);
-							}
-						}
+						Assert.IsNull(norms);
 						norms = new byte[reader.MaxDoc()];
 						reader.Norms(f.Name(), norms, 0);
 						for (int j = 0; j < reader.MaxDoc(); j++)
@@ -238,7 +228,7 @@ namespace Lucene.Net.Index
 			
 			TermFreqVector[] results = reader.GetTermFreqVectors(0);
 			Assert.IsTrue(results != null);
-			Assert.IsTrue(results.Length == 4, "We do not have 4 term freq vectors, we have: " + results.Length);
+			Assert.IsTrue(results.Length == 3, "We do not have 3 term freq vectors, we have: " + results.Length);
 		}
 	}
 }
\ No newline at end of file
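
The branch deleted in the last TestSegmentReader hunk reflects the removal of "fake norms" in 3.0: Norms(field) now returns null for a field that stores no norms, while the buffer-filling overload still writes the default norm (1.0, encoded) into the caller's array. In brief, the pattern the updated assertions encode:

    byte[] norms = reader.Norms(f.Name());            // null: no norms stored for this field
    byte norm1 = DefaultSimilarity.EncodeNorm(1.0f);  // the default every document gets
    norms = new byte[reader.MaxDoc()];
    reader.Norms(f.Name(), norms, 0);                 // overload fills the buffer with defaults
    for (int j = 0; j < reader.MaxDoc(); j++)
        Assert.AreEqual(norm1, norms[j]);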