Posted to commits@lucenenet.apache.org by ar...@apache.org on 2009/11/03 19:06:38 UTC

svn commit: r832486 [5/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene....

Added: incubator/lucene.net/trunk/C#/src/Test/Analysis/Tokenattributes/TestTermAttributeImpl.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/Tokenattributes/TestTermAttributeImpl.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/Tokenattributes/TestTermAttributeImpl.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/Tokenattributes/TestTermAttributeImpl.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,200 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Analysis.Tokenattributes
+{
+	
+    [TestFixture]
+	public class TestTermAttributeImpl:LuceneTestCase
+	{
+		
+		public TestTermAttributeImpl(System.String name):base(name)
+		{
+		}
+		
+        [Test]
+		public virtual void  TestResize()
+		{
+			TermAttributeImpl t = new TermAttributeImpl();
+			char[] content = "hello".ToCharArray();
+			t.SetTermBuffer(content, 0, content.Length);
+			for (int i = 0; i < 2000; i++)
+			{
+				t.ResizeTermBuffer(i);
+				Assert.IsTrue(i <= t.TermBuffer().Length);
+				Assert.AreEqual("hello", t.Term());
+			}
+		}
+		
+        [Test]
+		public virtual void  TestGrow()
+		{
+			TermAttributeImpl t = new TermAttributeImpl();
+			System.Text.StringBuilder buf = new System.Text.StringBuilder("ab");
+			for (int i = 0; i < 20; i++)
+			{
+				char[] content = buf.ToString().ToCharArray();
+				t.SetTermBuffer(content, 0, content.Length);
+				Assert.AreEqual(buf.Length, t.TermLength());
+				Assert.AreEqual(buf.ToString(), t.Term());
+				buf.Append(buf.ToString());
+			}
+			Assert.AreEqual(1048576, t.TermLength());
+			Assert.AreEqual(1179654, t.TermBuffer().Length);
+			
+			// now as a string, first variant
+			t = new TermAttributeImpl();
+			buf = new System.Text.StringBuilder("ab");
+			for (int i = 0; i < 20; i++)
+			{
+				System.String content = buf.ToString();
+				t.SetTermBuffer(content, 0, content.Length);
+				Assert.AreEqual(content.Length, t.TermLength());
+				Assert.AreEqual(content, t.Term());
+				buf.Append(content);
+			}
+			Assert.AreEqual(1048576, t.TermLength());
+			Assert.AreEqual(1179654, t.TermBuffer().Length);
+			
+			// now as a string, second variant
+			t = new TermAttributeImpl();
+			buf = new System.Text.StringBuilder("ab");
+			for (int i = 0; i < 20; i++)
+			{
+				System.String content = buf.ToString();
+				t.SetTermBuffer(content);
+				Assert.AreEqual(content.Length, t.TermLength());
+				Assert.AreEqual(content, t.Term());
+				buf.Append(content);
+			}
+			Assert.AreEqual(1048576, t.TermLength());
+			Assert.AreEqual(1179654, t.TermBuffer().Length);
+			
+			// Test for slow growth to a long term
+			t = new TermAttributeImpl();
+			buf = new System.Text.StringBuilder("a");
+			for (int i = 0; i < 20000; i++)
+			{
+				System.String content = buf.ToString();
+				t.SetTermBuffer(content);
+				Assert.AreEqual(content.Length, t.TermLength());
+				Assert.AreEqual(content, t.Term());
+				buf.Append("a");
+			}
+			Assert.AreEqual(20000, t.TermLength());
+			Assert.AreEqual(20167, t.TermBuffer().Length);
+			
+			// Test for slow growth to a long term
+			t = new TermAttributeImpl();
+			buf = new System.Text.StringBuilder("a");
+			for (int i = 0; i < 20000; i++)
+			{
+				System.String content = buf.ToString();
+				t.SetTermBuffer(content);
+				Assert.AreEqual(content.Length, t.TermLength());
+				Assert.AreEqual(content, t.Term());
+				buf.Append("a");
+			}
+			Assert.AreEqual(20000, t.TermLength());
+			Assert.AreEqual(20167, t.TermBuffer().Length);
+		}
+		
+        [Test]
+		public virtual void  TestToString()
+		{
+			char[] b = new char[]{'a', 'l', 'o', 'h', 'a'};
+			TermAttributeImpl t = new TermAttributeImpl();
+			t.SetTermBuffer(b, 0, 5);
+			Assert.AreEqual("term=aloha", t.ToString());
+			
+			t.SetTermBuffer("hi there");
+			Assert.AreEqual("term=hi there", t.ToString());
+		}
+		
+        [Test]
+		public virtual void  TestMixedStringArray()
+		{
+			TermAttributeImpl t = new TermAttributeImpl();
+			t.SetTermBuffer("hello");
+			Assert.AreEqual(t.TermLength(), 5);
+			Assert.AreEqual(t.Term(), "hello");
+			t.SetTermBuffer("hello2");
+			Assert.AreEqual(t.TermLength(), 6);
+			Assert.AreEqual(t.Term(), "hello2");
+			t.SetTermBuffer("hello3".ToCharArray(), 0, 6);
+			Assert.AreEqual(t.Term(), "hello3");
+			
+			// Make sure if we get the buffer and change a character
+			// that term() reflects the change
+			char[] buffer = t.TermBuffer();
+			buffer[1] = 'o';
+			Assert.AreEqual(t.Term(), "hollo3");
+		}
+		
+        [Test]
+		public virtual void  TestClone()
+		{
+			TermAttributeImpl t = new TermAttributeImpl();
+			char[] content = "hello".ToCharArray();
+			t.SetTermBuffer(content, 0, 5);
+			char[] buf = t.TermBuffer();
+			TermAttributeImpl copy = (TermAttributeImpl) TestSimpleAttributeImpls.AssertCloneIsEqual(t);
+			Assert.AreEqual(t.Term(), copy.Term());
+			Assert.AreNotEqual(buf, copy.TermBuffer());
+		}
+		
+        [Test]
+		public virtual void  TestEquals()
+		{
+			TermAttributeImpl t1a = new TermAttributeImpl();
+			char[] content1a = "hello".ToCharArray();
+			t1a.SetTermBuffer(content1a, 0, 5);
+			TermAttributeImpl t1b = new TermAttributeImpl();
+			char[] content1b = "hello".ToCharArray();
+			t1b.SetTermBuffer(content1b, 0, 5);
+			TermAttributeImpl t2 = new TermAttributeImpl();
+			char[] content2 = "hello2".ToCharArray();
+			t2.SetTermBuffer(content2, 0, 6);
+			Assert.IsTrue(t1a.Equals(t1b));
+			Assert.IsFalse(t1a.Equals(t2));
+			Assert.IsFalse(t2.Equals(t1b));
+		}
+		
+        [Test]
+		public virtual void  TestCopyTo()
+		{
+			TermAttributeImpl t = new TermAttributeImpl();
+			TermAttributeImpl copy = (TermAttributeImpl) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
+			Assert.AreEqual("", t.Term());
+			Assert.AreEqual("", copy.Term());
+			
+			t = new TermAttributeImpl();
+			char[] content = "hello".ToCharArray();
+			t.SetTermBuffer(content, 0, 5);
+			char[] buf = t.TermBuffer();
+			copy = (TermAttributeImpl) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
+			Assert.AreEqual(t.Term(), copy.Term());
+			Assert.AreNotEqual(buf, copy.TermBuffer());
+		}
+	}
+}
\ No newline at end of file
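
The assertions above pin down TermAttributeImpl's buffer contract: ResizeTermBuffer(n) guarantees a capacity of at least n (the oversized lengths such as 1179654 for a 1048576-char term come from Lucene's internal over-allocation policy), and TermBuffer() hands back the live internal array, so mutating it is visible through Term(). A minimal sketch of that surface, using only the methods the tests themselves exercise:

    using Lucene.Net.Analysis.Tokenattributes;

    class TermBufferDemo
    {
        static void Main()
        {
            TermAttributeImpl t = new TermAttributeImpl();

            // Fill the term from a char[] slice...
            char[] content = "hello world".ToCharArray();
            t.SetTermBuffer(content, 0, 5);               // term == "hello"

            // ...or from a string.
            t.SetTermBuffer("hello");

            // TermBuffer() is the live internal array, not a copy.
            char[] buf = t.TermBuffer();
            buf[0] = 'j';
            System.Console.WriteLine(t.Term());           // "jello"
            System.Console.WriteLine(t.TermLength());     // 5
        }
    }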

Modified: incubator/lucene.net/trunk/C#/src/Test/App.config
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/App.config?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/App.config (original)
+++ incubator/lucene.net/trunk/C#/src/Test/App.config Tue Nov  3 18:06:27 2009
@@ -1,9 +1,9 @@
 <?xml version="1.0" encoding="utf-8" ?>
 <configuration>
   <appSettings>
-    <add key="tempDir" value="c:\windows\temp\Lucene.Net-Tests"/>
+    <add key="tempDir" value="C:\Windows\Temp\Lucene.Net-Tests"/>
   </appSettings>
-  <!-- when i add this setting and run tests, i get 0 success, 0 failures, 0 tests not run
+  <!-- When I add this setting and run tests, I get 0 success, 0 failures, 0 tests not run
   <appSettings>
     <add key="Lucene.Net.CompressionLib.class" value="Lucene.Net.Index.Compression.SharpZipLibAdapter"/>
   </appSettings>
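
The tempDir key controls where the test suite writes its scratch indexes. A minimal sketch of reading it (assuming the stock System.Configuration API; the test harness may route this through its own helper):

    using System.Configuration;

    class TempDirDemo
    {
        static void Main()
        {
            // Fall back to the system temp path when the key is absent.
            string tempDir = ConfigurationManager.AppSettings["tempDir"]
                             ?? System.IO.Path.GetTempPath();
            System.Console.WriteLine(tempDir);
        }
    }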

Modified: incubator/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/AssemblyInfo.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs Tue Nov  3 18:06:27 2009
@@ -16,7 +16,7 @@
 [assembly: AssemblyDefaultAlias("Lucene.Net")]
 [assembly: AssemblyCulture("")]
 
-[assembly: AssemblyInformationalVersionAttribute("2.4.0")]
+[assembly: AssemblyInformationalVersionAttribute("2.9.0")]
 
 //
 // Version information for an assembly consists of the following four values:
@@ -29,7 +29,7 @@
 // You can specify all the values or you can default the Revision and Build Numbers 
 // by using the '*' as shown below:
 
-[assembly: AssemblyVersion("2.4.0.002")]
+[assembly: AssemblyVersion("2.9.0.001")]
 
 //
 // In order to sign your assembly you must specify a key to use. Refer to the 
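
As the comment above notes, the Build and Revision numbers can be left to the compiler with a '*' wildcard; this commit instead pins all four components, which keeps the 2.9.0 build reproducible. For illustration, the wildcard form the comment describes would look like:

    using System.Reflection;

    // Pin Major.Minor and let the compiler generate Build and Revision:
    [assembly: AssemblyVersion("2.9.*")]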

Modified: incubator/lucene.net/trunk/C#/src/Test/Document/TestBinaryDocument.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Document/TestBinaryDocument.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Document/TestBinaryDocument.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Document/TestBinaryDocument.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,11 +19,11 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 
 namespace Lucene.Net.Documents
 {
@@ -32,22 +32,22 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Id: TestBinaryDocument.java 598296 2007-11-26 14:52:01Z mikemccand $
+	/// <version>  $Id: TestBinaryDocument.java 756760 2009-03-20 21:10:12Z mikemccand $
 	/// </version>
-	[TestFixture]
-	public class TestBinaryDocument : LuceneTestCase    
+    [TestFixture]
+	public class TestBinaryDocument:LuceneTestCase
 	{
 		
 		internal System.String binaryValStored = "this text will be stored as a byte array in the index";
 		internal System.String binaryValCompressed = "this text will be also stored and compressed as a byte array in the index";
 		
-		[Test]
+        [Test]
 		public virtual void  TestBinaryFieldInIndex()
 		{
-			Lucene.Net.Documents.Fieldable binaryFldStored = new Field("binaryStored", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValStored), Field.Store.YES);
-			Lucene.Net.Documents.Fieldable binaryFldCompressed = new Field("binaryCompressed", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed), Field.Store.COMPRESS);
-			Lucene.Net.Documents.Fieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);
-			Lucene.Net.Documents.Fieldable stringFldCompressed = new Field("stringCompressed", binaryValCompressed, Field.Store.COMPRESS, Field.Index.NO, Field.TermVector.NO);
+			Fieldable binaryFldStored = new Field("binaryStored", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValStored), Field.Store.YES);
+			Fieldable binaryFldCompressed = new Field("binaryCompressed", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed), Field.Store.COMPRESS);
+			Fieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);
+			Fieldable stringFldCompressed = new Field("stringCompressed", binaryValCompressed, Field.Store.COMPRESS, Field.Index.NO, Field.TermVector.NO);
 			
 			try
 			{
@@ -55,11 +55,12 @@
 				new Field("fail", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed), Field.Store.NO);
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException iae)
 			{
+				;
 			}
 			
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			
 			doc.Add(binaryFldStored);
 			doc.Add(binaryFldCompressed);
@@ -68,25 +69,25 @@
 			doc.Add(stringFldCompressed);
 			
 			/** test for field count */
-			Assert.AreEqual(4, doc.GetFieldsCount());
+			Assert.AreEqual(4, doc.fields_ForNUnit.Count);
 			
 			/** add the doc to a ram index */
-			RAMDirectory dir = new RAMDirectory();
+			MockRAMDirectory dir = new MockRAMDirectory();
 			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(doc);
 			writer.Close();
 			
 			/** open a reader and fetch the document */
 			IndexReader reader = IndexReader.Open(dir);
-			Lucene.Net.Documents.Document docFromReader = reader.Document(0);
+			Document docFromReader = reader.Document(0);
 			Assert.IsTrue(docFromReader != null);
 			
 			/** fetch the binary stored field and compare its content with the original one */
-			System.String binaryFldStoredTest = System.Text.UTF8Encoding.UTF8.GetString(docFromReader.GetBinaryValue("binaryStored"));
+			System.String binaryFldStoredTest = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(docFromReader.GetBinaryValue("binaryStored")));
 			Assert.IsTrue(binaryFldStoredTest.Equals(binaryValStored));
 			
 			/** fetch the binary compressed field and compare its content with the original one */
-			System.String binaryFldCompressedTest = System.Text.UTF8Encoding.UTF8.GetString(docFromReader.GetBinaryValue("binaryCompressed"));
+			System.String binaryFldCompressedTest = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(docFromReader.GetBinaryValue("binaryCompressed")));
 			Assert.IsTrue(binaryFldCompressedTest.Equals(binaryValCompressed));
 			
 			/** fetch the string field and compare its content with the original one */
@@ -102,6 +103,38 @@
 			Assert.AreEqual(0, reader.NumDocs());
 			
 			reader.Close();
+			dir.Close();
+		}
+		
+        [Test]
+		public virtual void  TestCompressionTools()
+		{
+			Fieldable binaryFldCompressed = new Field("binaryCompressed", CompressionTools.Compress(System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed)), Field.Store.YES);
+			Fieldable stringFldCompressed = new Field("stringCompressed", CompressionTools.CompressString(binaryValCompressed), Field.Store.YES);
+			
+			Document doc = new Document();
+			
+			doc.Add(binaryFldCompressed);
+			doc.Add(stringFldCompressed);
+			
+			/** add the doc to a ram index */
+			MockRAMDirectory dir = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.AddDocument(doc);
+			writer.Close();
+			
+			/** open a reader and fetch the document */
+			IndexReader reader = IndexReader.Open(dir);
+			Document docFromReader = reader.Document(0);
+			Assert.IsTrue(docFromReader != null);
+			
+			/** fetch the binary compressed field and compare its content with the original one */
+			System.String binaryFldCompressedTest = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(CompressionTools.Decompress(docFromReader.GetBinaryValue("binaryCompressed"))));
+			Assert.IsTrue(binaryFldCompressedTest.Equals(binaryValCompressed));
+			Assert.IsTrue(CompressionTools.DecompressString(docFromReader.GetBinaryValue("stringCompressed")).Equals(binaryValCompressed));
+			
+			reader.Close();
+			dir.Close();
 		}
 	}
 }
\ No newline at end of file
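
The new TestCompressionTools case reflects the 2.9 API direction: Field.Store.COMPRESS still appears above, but compression is now also available explicitly through CompressionTools. A minimal round-trip sketch using only the methods the test exercises (Compress/Decompress for bytes, CompressString/DecompressString for strings):

    using Lucene.Net.Documents;

    class CompressionDemo
    {
        static void Main()
        {
            string text = "this text will be compressed";

            // String round-trip.
            byte[] packed = CompressionTools.CompressString(text);
            string restored = CompressionTools.DecompressString(packed);   // == text

            // Raw byte round-trip.
            byte[] raw = System.Text.Encoding.UTF8.GetBytes(text);
            byte[] packedRaw = CompressionTools.Compress(raw);
            byte[] rawAgain = CompressionTools.Decompress(packedRaw);      // == raw
        }
    }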

Modified: incubator/lucene.net/trunk/C#/src/Test/Document/TestDateTools.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Document/TestDateTools.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Document/TestDateTools.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Document/TestDateTools.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,14 +19,17 @@
 
 using NUnit.Framework;
 
+using LocalizedTestCase = Lucene.Net.Util.LocalizedTestCase;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Documents
 {
-	[TestFixture]
-	public class TestDateTools : LuceneTestCase
+	
+    [TestFixture]
+	public class TestDateTools:LocalizedTestCase
 	{
-		[Test]
+		
+        [Test]
 		public virtual void  TestStringToDate()
 		{
 			
@@ -45,7 +48,7 @@
 				d = DateTools.StringToDate("97"); // no date
 				Assert.Fail();
 			}
-			catch (System.FormatException)
+			catch (System.FormatException e)
 			{
 				/* expected exception */
 			}
@@ -54,7 +57,7 @@
 				d = DateTools.StringToDate("200401011235009999"); // no date
 				Assert.Fail();
 			}
-			catch (System.FormatException)
+			catch (System.FormatException e)
 			{
 				/* expected exception */
 			}
@@ -63,13 +66,13 @@
 				d = DateTools.StringToDate("aaaa"); // no date
 				Assert.Fail();
 			}
-			catch (System.FormatException)
+			catch (System.FormatException e)
 			{
 				/* expected exception */
 			}
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestStringtoTime()
 		{
 			long time = DateTools.StringToTime("197001010000");
@@ -82,7 +85,7 @@
 			Assert.AreEqual(cal.Ticks, time);
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestDateAndTimetoString()
 		{
 			System.DateTime cal = new System.DateTime(2004, 2, 3, 22, 8, 56, 333, new System.Globalization.GregorianCalendar());
@@ -154,7 +157,7 @@
 			Assert.AreEqual("19700101010203000", dateString);
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestRound()
 		{
 			System.DateTime date = new System.DateTime(2004, 2, 3, 22, 8, 56, 333, new System.Globalization.GregorianCalendar());
@@ -193,10 +196,10 @@
 		
 		private System.String IsoFormat(System.DateTime date)
 		{
-			return date.ToString("yyyy-MM-dd HH:mm:ss:fff");
-		}
-	
-		[Test]
+            return date.ToString("yyyy-MM-dd HH:mm:ss:fff");
+        }
+		
+        [Test]
 		public virtual void  TestDateToolsUTC()
 		{
 			// Sun, 30 Oct 2005 00:00:00 +0000 -- the last second of 2005's DST in Europe/London
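
The pattern throughout this fixture: DateTools round-trips between Lucene's sortable date strings and .NET dates, and malformed or over-long inputs raise System.FormatException. A minimal sketch using only the calls shown above (StringToDate, StringToTime):

    using Lucene.Net.Documents;

    class DateToolsDemo
    {
        static void Main()
        {
            // "yyyyMMddHHmm"-style string -> DateTime.
            System.DateTime d = DateTools.StringToDate("197001010000");

            // Same string -> ticks; equals the corresponding DateTime's Ticks.
            long ticks = DateTools.StringToTime("197001010000");

            try
            {
                DateTools.StringToDate("aaaa");   // not a date
            }
            catch (System.FormatException)
            {
                // expected, as asserted in TestStringToDate
            }
        }
    }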

Modified: incubator/lucene.net/trunk/C#/src/Test/Document/TestDocument.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Document/TestDocument.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Document/TestDocument.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Document/TestDocument.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -22,12 +22,12 @@
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
 using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using Searcher = Lucene.Net.Search.Searcher;
 using TermQuery = Lucene.Net.Search.TermQuery;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Documents
@@ -37,10 +37,10 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Id: TestDocument.java 583534 2007-10-10 16:46:35Z mikemccand $
+	/// <version>  $Id: TestDocument.java 754789 2009-03-15 23:24:39Z mikemccand $
 	/// </version>
 	[TestFixture]
-	public class TestDocument : LuceneTestCase
+	public class TestDocument:LuceneTestCase
 	{
 		
 		internal System.String binaryVal = "this text will be stored as a byte array in the index";
@@ -49,22 +49,22 @@
 		[Test]
 		public virtual void  TestBinaryField()
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			Fieldable stringFld = new Field("string", binaryVal, Field.Store.YES, Field.Index.NO);
-			Fieldable binaryFld = new Field("binary", (new System.Text.ASCIIEncoding()).GetBytes(binaryVal), Field.Store.YES);
-			Fieldable binaryFld2 = new Field("binary", (new System.Text.ASCIIEncoding()).GetBytes(binaryVal2), Field.Store.YES);
-
+			Fieldable binaryFld = new Field("binary", System.Text.UTF8Encoding.UTF8.GetBytes(binaryVal), Field.Store.YES);
+			Fieldable binaryFld2 = new Field("binary", System.Text.UTF8Encoding.UTF8.GetBytes(binaryVal2), Field.Store.YES);
+			
 			doc.Add(stringFld);
 			doc.Add(binaryFld);
-
-			Assert.AreEqual(2, doc.GetFieldsCount());
+			
+			Assert.AreEqual(2, doc.fields_ForNUnit.Count);
 			
 			Assert.IsTrue(binaryFld.IsBinary());
 			Assert.IsTrue(binaryFld.IsStored());
 			Assert.IsFalse(binaryFld.IsIndexed());
 			Assert.IsFalse(binaryFld.IsTokenized());
 			
-			System.String binaryTest = (new System.Text.ASCIIEncoding()).GetString(doc.GetBinaryValue("binary"));
+			System.String binaryTest = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(doc.GetBinaryValue("binary")));
 			Assert.IsTrue(binaryTest.Equals(binaryVal));
 			
 			System.String stringTest = doc.Get("string");
@@ -72,7 +72,7 @@
 			
 			doc.Add(binaryFld2);
 			
-			Assert.AreEqual(3, doc.GetFieldsCount());
+			Assert.AreEqual(3, doc.fields_ForNUnit.Count);
 			
 			byte[][] binaryTests = doc.GetBinaryValues("binary");
 			
@@ -87,10 +87,10 @@
 			Assert.IsTrue(binaryTest2.Equals(binaryVal2));
 			
 			doc.RemoveField("string");
-			Assert.AreEqual(2, doc.GetFieldsCount());
+			Assert.AreEqual(2, doc.fields_ForNUnit.Count);
 			
 			doc.RemoveFields("binary");
-			Assert.AreEqual(0, doc.GetFieldsCount());
+			Assert.AreEqual(0, doc.fields_ForNUnit.Count);
 		}
 		
 		/// <summary> Tests {@link Document#RemoveField(String)} method for a brand new Document
@@ -101,27 +101,27 @@
 		[Test]
 		public virtual void  TestRemoveForNewDocument()
 		{
-			Lucene.Net.Documents.Document doc = MakeDocumentWithFields();
-			Assert.AreEqual(8, doc.GetFieldsCount());
+			Document doc = MakeDocumentWithFields();
+			Assert.AreEqual(8, doc.fields_ForNUnit.Count);
 			doc.RemoveFields("keyword");
-			Assert.AreEqual(6, doc.GetFieldsCount());
+			Assert.AreEqual(6, doc.fields_ForNUnit.Count);
 			doc.RemoveFields("doesnotexists"); // removing non-existing fields is silently ignored
 			doc.RemoveFields("keyword"); // removing a field more than once
-			Assert.AreEqual(6, doc.GetFieldsCount());
+			Assert.AreEqual(6, doc.fields_ForNUnit.Count);
 			doc.RemoveField("text");
-			Assert.AreEqual(5, doc.GetFieldsCount());
+			Assert.AreEqual(5, doc.fields_ForNUnit.Count);
 			doc.RemoveField("text");
-			Assert.AreEqual(4, doc.GetFieldsCount());
+			Assert.AreEqual(4, doc.fields_ForNUnit.Count);
 			doc.RemoveField("text");
-			Assert.AreEqual(4, doc.GetFieldsCount());
+			Assert.AreEqual(4, doc.fields_ForNUnit.Count);
 			doc.RemoveField("doesnotexists"); // removing non-existing fields is silently ignored
-			Assert.AreEqual(4, doc.GetFieldsCount());
+			Assert.AreEqual(4, doc.fields_ForNUnit.Count);
 			doc.RemoveFields("unindexed");
-			Assert.AreEqual(2, doc.GetFieldsCount());
+			Assert.AreEqual(2, doc.fields_ForNUnit.Count);
 			doc.RemoveFields("unstored");
-			Assert.AreEqual(0, doc.GetFieldsCount());
+			Assert.AreEqual(0, doc.fields_ForNUnit.Count);
 			doc.RemoveFields("doesnotexists"); // removing non-existing fields is silently ignored
-			Assert.AreEqual(0, doc.GetFieldsCount());
+			Assert.AreEqual(0, doc.fields_ForNUnit.Count);
 		}
 		
 		[Test]
@@ -134,7 +134,7 @@
 				new Field("name", "value", Field.Store.NO, Field.Index.NO);
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expected exception
 			}
@@ -144,7 +144,7 @@
 				new Field("name", "value", Field.Store.YES, Field.Index.NO, Field.TermVector.YES);
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expected exception
 			}
@@ -187,9 +187,9 @@
 			searcher.Close();
 		}
 		
-		private Lucene.Net.Documents.Document MakeDocumentWithFields()
+		private Document MakeDocumentWithFields()
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("keyword", "test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
 			doc.Add(new Field("keyword", "test2", Field.Store.YES, Field.Index.NOT_ANALYZED));
 			doc.Add(new Field("text", "test1", Field.Store.YES, Field.Index.ANALYZED));
@@ -201,7 +201,7 @@
 			return doc;
 		}
 		
-		private void  DoAssert(Lucene.Net.Documents.Document doc, bool fromIndex)
+		private void  DoAssert(Document doc, bool fromIndex)
 		{
 			System.String[] keywordFieldValues = doc.GetValues("keyword");
 			System.String[] textFieldValues = doc.GetValues("text");
@@ -276,5 +276,30 @@
 			dir.Close();
 			Assert.AreEqual(7, result, "did not see all IDs");
 		}
+		
+		[Test]
+		public virtual void  TestFieldSetValueChangeBinary()
+		{
+			Field field1 = new Field("field1", new byte[0], Field.Store.YES);
+			Field field2 = new Field("field2", "", Field.Store.YES, Field.Index.ANALYZED);
+			try
+			{
+				field1.SetValue("abc");
+				Assert.Fail("did not hit expected exception");
+			}
+			catch (System.ArgumentException iae)
+			{
+				// expected
+			}
+			try
+			{
+				field2.SetValue(new byte[0]);
+				Assert.Fail("did not hit expected exception");
+			}
+			catch (System.ArgumentException iae)
+			{
+				// expected
+			}
+		}
 	}
 }
\ No newline at end of file
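
Two behaviors in the tests above are worth spelling out: a Field constructed over a byte[] is stored-only (it cannot be indexed or tokenized), and SetValue enforces the field's original kind, so assigning a string to a binary field, or bytes to a text field, throws System.ArgumentException. A minimal sketch of the binary round-trip, using only calls shown above (Encoding.UTF8.GetString is standard .NET; the commit itself prefers the GetChars form):

    using Lucene.Net.Documents;

    class BinaryFieldDemo
    {
        static void Main()
        {
            Document doc = new Document();
            byte[] payload = System.Text.Encoding.UTF8.GetBytes("stored as bytes");
            doc.Add(new Field("binary", payload, Field.Store.YES));

            // Round-trip the stored bytes.
            byte[] back = doc.GetBinaryValue("binary");
            string restored = System.Text.Encoding.UTF8.GetString(back);

            // SetValue must match the field's original kind.
            Field binaryField = new Field("b", new byte[0], Field.Store.YES);
            try
            {
                binaryField.SetValue("abc");   // string into a binary field
            }
            catch (System.ArgumentException)
            {
                // expected, per TestFieldSetValueChangeBinary
            }
        }
    }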

Modified: incubator/lucene.net/trunk/C#/src/Test/Document/TestNumberTools.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Document/TestNumberTools.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Document/TestNumberTools.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Document/TestNumberTools.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -23,8 +23,9 @@
 
 namespace Lucene.Net.Documents
 {
+	
 	[TestFixture]
-	public class TestNumberTools : LuceneTestCase
+	public class TestNumberTools:LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestNearZero()

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/DocHelper.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -22,8 +22,8 @@
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Fieldable = Lucene.Net.Documents.Fieldable;
-using Similarity = Lucene.Net.Search.Similarity;
 using Directory = Lucene.Net.Store.Directory;
+using Similarity = Lucene.Net.Search.Similarity;
 
 namespace Lucene.Net.Index
 {
@@ -45,8 +45,8 @@
 		public static readonly int[] COMPRESSED_FIELD_2_FREQS = new int[]{3, 1, 1};
 		public const System.String COMPRESSED_TEXT_FIELD_2_KEY = "compressedTextField2";
 		public static Field compressedTextField2;
-
-
+		
+		
 		public const System.String FIELD_3_TEXT = "aaaNoNorms aaaNoNorms bbbNoNorms";
 		public const System.String TEXT_FIELD_3_KEY = "textField3";
 		public static Field textField3;
@@ -59,6 +59,10 @@
 		public const System.String NO_NORMS_KEY = "omitNorms";
 		public static Field noNormsField;
 		
+		public const System.String NO_TF_TEXT = "analyzed with no tf and positions";
+		public const System.String NO_TF_KEY = "omitTermFreqAndPositions";
+		public static Field noTFField;
+		
 		public const System.String UNINDEXED_FIELD_TEXT = "unindexed field text";
 		public const System.String UNINDEXED_FIELD_KEY = "unIndField";
 		public static Field unIndField;
@@ -88,7 +92,7 @@
 		public const System.String FIELD_UTF1_TEXT = "field one \u4e00text";
 		public const System.String TEXT_FIELD_UTF1_KEY = "textField1Utf8";
 		public static Field textUtfField1;
-
+		
 		public const System.String FIELD_UTF2_TEXT = "field field field \u4e00two text";
 		//Fields will be lexicographically sorted.  So, the order is: field, text, two
 		public static readonly int[] FIELD_UTF2_FREQS = new int[]{3, 1, 1};
@@ -101,11 +105,10 @@
 		public static System.Collections.IDictionary nameValues = null;
 		
 		// ordered list of all the fields...
-		// this results in null entries in array....
-		//public static Field[] fields = new Field[]{textField1, textField2, textField3, compressedTextField2, keyField, noNormsField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField};
-		public static Field[] fields;
+		// could use LinkedHashMap for this purpose if Java1.4 is OK
+		public static Field[] fields = new Field[]{textField1, textField2, textField3, compressedTextField2, keyField, noNormsField, noTFField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField};
 		
-		// Map<String fieldName, Field field>
+		// Map<String fieldName, Fieldable field>
 		public static System.Collections.IDictionary all = new System.Collections.Hashtable();
 		public static System.Collections.IDictionary indexed = new System.Collections.Hashtable();
 		public static System.Collections.IDictionary stored = new System.Collections.Hashtable();
@@ -115,26 +118,18 @@
 		public static System.Collections.IDictionary notermvector = new System.Collections.Hashtable();
 		public static System.Collections.IDictionary lazy = new System.Collections.Hashtable();
 		public static System.Collections.IDictionary noNorms = new System.Collections.Hashtable();
+		public static System.Collections.IDictionary noTf = new System.Collections.Hashtable();
 		
 		
 		private static void  Add(System.Collections.IDictionary map, Fieldable field)
 		{
-			if (field == null) System.Console.WriteLine("FIELD IS NULL!!!");
-			if (field == null) System.Console.WriteLine("FIELD IS NULL!!!");
-			if (field == null) System.Console.WriteLine("FIELD IS NULL!!!");
-			if (map == null) System.Console.WriteLine("MAP IS NULL!!!");
-			if (map == null) System.Console.WriteLine("MAP IS NULL!!!");
-			if (map == null) System.Console.WriteLine("MAP IS NULL!!!");
-			if (field.Name() == null) System.Console.WriteLine("FIELD NAME IS NULL!!!");
-			if (field.Name() == null) System.Console.WriteLine("FIELD NAME IS NULL!!!");
-			if (field.Name() == null) System.Console.WriteLine("FIELD NAME IS NULL!!!");
 			map[field.Name()] = field;
 		}
 		
 		/// <summary> Adds the fields above to a document </summary>
 		/// <param name="doc">The document to write
 		/// </param>
-		public static void  SetupDoc(Lucene.Net.Documents.Document doc)
+		public static void  SetupDoc(Document doc)
 		{
 			for (int i = 0; i < fields.Length; i++)
 			{
@@ -195,7 +190,11 @@
 				textField3.SetOmitNorms(true);
 			}
 			keyField = new Field(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES, Field.Index.NOT_ANALYZED);
-			noNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, Field.Store.YES, Field.Index.NO_NORMS);
+			noNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS);
+			noTFField = new Field(NO_TF_KEY, NO_TF_TEXT, Field.Store.YES, Field.Index.ANALYZED);
+			{
+				noTFField.SetOmitTermFreqAndPositions(true);
+			}
 			unIndField = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, Field.Store.YES, Field.Index.NO);
 			unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO);
 			unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES);
@@ -209,18 +208,19 @@
 				{
 					buffer.Append("Lazily loading lengths of language in lieu of laughing ");
 				}
-
+				
 				try
 				{
-					LAZY_FIELD_BINARY_BYTES = System.Text.Encoding.GetEncoding("UTF-8").GetBytes("These are some binary field bytes");
+					LAZY_FIELD_BINARY_BYTES = System.Text.Encoding.GetEncoding("UTF8").GetBytes("These are some binary field bytes");
 				}
-				catch (System.IO.IOException)
+				catch (System.IO.IOException e)
 				{
 				}
 				lazyFieldBinary = new Field(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES, Field.Store.YES);
+				fields[fields.Length - 2] = lazyFieldBinary;
 				LARGE_LAZY_FIELD_TEXT = buffer.ToString();
 				largeLazyField = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.ANALYZED);
-				fields = new Field[] { textField1, textField2, textField3, compressedTextField2, keyField, noNormsField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField };
+				fields[fields.Length - 1] = largeLazyField;
 				for (int i = 0; i < fields.Length; i++)
 				{
 					Fieldable f = fields[i];
@@ -239,11 +239,12 @@
 						Add(unstored, f);
 					if (f.GetOmitNorms())
 						Add(noNorms, f);
+					if (f.GetOmitTf())
+						Add(noTf, f);
 					if (f.IsLazy())
 						Add(lazy, f);
 				}
 			}
-
 			{
 				nameValues = new System.Collections.Hashtable();
 				nameValues[TEXT_FIELD_1_KEY] = FIELD_1_TEXT;
@@ -252,6 +253,7 @@
 				nameValues[TEXT_FIELD_3_KEY] = FIELD_3_TEXT;
 				nameValues[KEYWORD_FIELD_KEY] = KEYWORD_TEXT;
 				nameValues[NO_NORMS_KEY] = NO_NORMS_TEXT;
+				nameValues[NO_TF_KEY] = NO_TF_TEXT;
 				nameValues[UNINDEXED_FIELD_KEY] = UNINDEXED_FIELD_TEXT;
 				nameValues[UNSTORED_FIELD_1_KEY] = UNSTORED_1_FIELD_TEXT;
 				nameValues[UNSTORED_FIELD_2_KEY] = UNSTORED_2_FIELD_TEXT;
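
Note the C# pitfall this revision works around: static field initializers run in declaration order, so the fields array initializer above captures null for lazyFieldBinary and largeLazyField, which are only constructed later in the static block; hence the slot patches fields[fields.Length - 2] and fields[fields.Length - 1]. A self-contained illustration of the ordering rule (hypothetical names):

    class InitOrder
    {
        // 'all' is initialized before 'late', so it captures null here.
        static object[] all = new object[] { late };
        static object late = new object();

        static void Main()
        {
            System.Console.WriteLine(all[0] == null);   // True
            all[0] = late;   // DocHelper's fix: patch the slot after construction
        }
    }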

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/MockIndexInput.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -17,14 +17,16 @@
 
 using System;
 
+using NUnit.Framework;
+
 using BufferedIndexInput = Lucene.Net.Store.BufferedIndexInput;
 
 namespace Lucene.Net.Index
 {
 	
-	public class MockIndexInput : BufferedIndexInput
+	[TestFixture]
+	public class MockIndexInput:BufferedIndexInput
 	{
-        // i consider this weird
 		new private byte[] buffer;
 		private int pointer = 0;
 		private long length;
@@ -35,7 +37,7 @@
 			length = bytes.Length;
 		}
 		
-		protected override void  ReadInternal(byte[] dest, int destOffset, int len)
+		public override void  ReadInternal(byte[] dest, int destOffset, int len)
 		{
 			int remainder = len;
 			int start = pointer;
@@ -58,7 +60,7 @@
 			// ignore
 		}
 		
-		protected override void  SeekInternal(long pos)
+		public override void  SeekInternal(long pos)
 		{
 			pointer = (int) pos;
 		}
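
For orientation: MockIndexInput is an in-memory BufferedIndexInput whose ReadInternal copies from a backing byte[] and whose SeekInternal just moves a pointer. A minimal sketch of driving it through the public IndexInput surface (assuming, as the constructor body above implies, that it takes the backing bytes):

    using Lucene.Net.Index;

    class MockInputDemo
    {
        static void Main()
        {
            byte[] bytes = new byte[] { 1, 2, 3, 4 };
            MockIndexInput input = new MockIndexInput(bytes);

            byte first = input.ReadByte();   // 1; BufferedIndexInput refills via ReadInternal
            input.Seek(2);                   // routed to SeekInternal
            byte third = input.ReadByte();   // 3
            long len = input.Length();       // 4
        }
    }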

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestAddIndexesNoOptimize.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,21 +19,20 @@
 
 using NUnit.Framework;
 
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using PhraseQuery = Lucene.Net.Search.PhraseQuery;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
 	
 	[TestFixture]
-	public class TestAddIndexesNoOptimize : LuceneTestCase
+	public class TestAddIndexesNoOptimize:LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestSimpleCase()
@@ -130,126 +129,128 @@
 		}
 		
 		[Test]
-  public void TestWithPendingDeletes() {
-    // main directory
-    Directory dir = new RAMDirectory();
-    // auxiliary directory
-    Directory aux = new RAMDirectory();
-
-    SetUpDirs(dir, aux);
-    IndexWriter writer = NewWriter(dir, false);
-    writer.AddIndexesNoOptimize(new Directory[] {aux});
-
-    // Adds 10 docs, then replaces them with another 10
-    // docs, so 10 pending deletes:
-    for (int i = 0; i < 20; i++) {
-      Document doc = new Document();
-      doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
-      doc.Add(new Field("content", "bbb " + i, Field.Store.NO,
-                        Field.Index.ANALYZED));
-      writer.UpdateDocument(new Term("id", "" + (i%10)), doc);
-    }
-    // Deletes one of the 10 added docs, leaving 9:
-    PhraseQuery q = new PhraseQuery();
-    q.Add(new Term("content", "bbb"));
-    q.Add(new Term("content", "14"));
-    writer.DeleteDocuments(q);
-
-    writer.Optimize();
-
-    VerifyNumDocs(dir, 1039);
-    VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
-    VerifyTermDocs(dir, new Term("content", "bbb"), 9);
-
-    writer.Close();
-    dir.Close();
-    aux.Close();
-  }
-
-		[Test]
-  public void TestWithPendingDeletes2() {
-    // main directory
-    Directory dir = new RAMDirectory();
-    // auxiliary directory
-    Directory aux = new RAMDirectory();
-
-    SetUpDirs(dir, aux);
-    IndexWriter writer = NewWriter(dir, false);
-
-    // Adds 10 docs, then replaces them with another 10
-    // docs, so 10 pending deletes:
-    for (int i = 0; i < 20; i++) {
-      Document doc = new Document();
-      doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
-      doc.Add(new Field("content", "bbb " + i, Field.Store.NO,
-                        Field.Index.ANALYZED));
-      writer.UpdateDocument(new Term("id", "" + (i%10)), doc);
-    }
-
-    writer.AddIndexesNoOptimize(new Directory[] {aux});
-
-    // Deletes one of the 10 added docs, leaving 9:
-    PhraseQuery q = new PhraseQuery();
-    q.Add(new Term("content", "bbb"));
-    q.Add(new Term("content", "14"));
-    writer.DeleteDocuments(q);
-
-    writer.Optimize();
-
-    VerifyNumDocs(dir, 1039);
-    VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
-    VerifyTermDocs(dir, new Term("content", "bbb"), 9);
-
-    writer.Close();
-    dir.Close();
-    aux.Close();
-  }
-
-		[Test]
-  public void TestWithPendingDeletes3() {
-    // main directory
-    Directory dir = new RAMDirectory();
-    // auxiliary directory
-    Directory aux = new RAMDirectory();
-
-    SetUpDirs(dir, aux);
-    IndexWriter writer = NewWriter(dir, false);
-
-    // Adds 10 docs, then replaces them with another 10
-    // docs, so 10 pending deletes:
-    for (int i = 0; i < 20; i++) {
-      Document doc = new Document();
-      doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
-      doc.Add(new Field("content", "bbb " + i, Field.Store.NO,
-                        Field.Index.ANALYZED));
-      writer.UpdateDocument(new Term("id", "" + (i%10)), doc);
-    }
-
-    // Deletes one of the 10 added docs, leaving 9:
-    PhraseQuery q = new PhraseQuery();
-    q.Add(new Term("content", "bbb"));
-    q.Add(new Term("content", "14"));
-    writer.DeleteDocuments(q);
-
-    writer.AddIndexesNoOptimize(new Directory[] {aux});
-
-    writer.Optimize();
-
-    VerifyNumDocs(dir, 1039);
-    VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
-    VerifyTermDocs(dir, new Term("content", "bbb"), 9);
-
-    writer.Close();
-    dir.Close();
-    aux.Close();
-  }
-
-
+		public virtual void  TestWithPendingDeletes()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			
+			SetUpDirs(dir, aux);
+			IndexWriter writer = NewWriter(dir, false);
+			writer.AddIndexesNoOptimize(new Directory[]{aux});
+			
+			// Adds 10 docs, then replaces them with another 10
+			// docs, so 10 pending deletes:
+			for (int i = 0; i < 20; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
+				writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
+			}
+			// Deletes one of the 10 added docs, leaving 9:
+			PhraseQuery q = new PhraseQuery();
+			q.Add(new Term("content", "bbb"));
+			q.Add(new Term("content", "14"));
+			writer.DeleteDocuments(q);
+			
+			writer.Optimize();
+			
+			VerifyNumDocs(dir, 1039);
+			VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+			VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+			
+			writer.Close();
+			dir.Close();
+			aux.Close();
+		}
+		
+		[Test]
+		public virtual void  TestWithPendingDeletes2()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			
+			SetUpDirs(dir, aux);
+			IndexWriter writer = NewWriter(dir, false);
+			
+			// Adds 10 docs, then replaces them with another 10
+			// docs, so 10 pending deletes:
+			for (int i = 0; i < 20; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
+				writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
+			}
+			
+			writer.AddIndexesNoOptimize(new Directory[]{aux});
+			
+			// Deletes one of the 10 added docs, leaving 9:
+			PhraseQuery q = new PhraseQuery();
+			q.Add(new Term("content", "bbb"));
+			q.Add(new Term("content", "14"));
+			writer.DeleteDocuments(q);
+			
+			writer.Optimize();
+			
+			VerifyNumDocs(dir, 1039);
+			VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+			VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+			
+			writer.Close();
+			dir.Close();
+			aux.Close();
+		}
+		
+		[Test]
+		public virtual void  TestWithPendingDeletes3()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			
+			SetUpDirs(dir, aux);
+			IndexWriter writer = NewWriter(dir, false);
+			
+			// Adds 10 docs, then replaces them with another 10
+			// docs, so 10 pending deletes:
+			for (int i = 0; i < 20; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
+				writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
+			}
+			
+			// Deletes one of the 10 added docs, leaving 9:
+			PhraseQuery q = new PhraseQuery();
+			q.Add(new Term("content", "bbb"));
+			q.Add(new Term("content", "14"));
+			writer.DeleteDocuments(q);
+			
+			writer.AddIndexesNoOptimize(new Directory[]{aux});
+			
+			writer.Optimize();
+			
+			VerifyNumDocs(dir, 1039);
+			VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
+			VerifyTermDocs(dir, new Term("content", "bbb"), 9);
+			
+			writer.Close();
+			dir.Close();
+			aux.Close();
+		}
+		
 		// case 0: add self or exceed maxMergeDocs, expect exception
 		[Test]
 		public virtual void  TestAddSelf()
-        {
-            // main directory
+		{
+			// main directory
 			Directory dir = new RAMDirectory();
 			// auxiliary directory
 			Directory aux = new RAMDirectory();
@@ -281,7 +282,7 @@
 				writer.AddIndexesNoOptimize(new Directory[]{aux, dir});
 				Assert.IsTrue(false);
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				Assert.AreEqual(100, writer.DocCount());
 			}
@@ -355,11 +356,12 @@
 			Directory aux = new RAMDirectory();
 			
 			SetUpDirs(dir, aux);
-
+			
 			IndexWriter writer = NewWriter(dir, false);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(4);
-			writer.AddIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
+			
+			writer.AddIndexesNoOptimize(new Directory[]{aux, new RAMDirectory(aux)});
 			Assert.AreEqual(1060, writer.DocCount());
 			Assert.AreEqual(1000, writer.GetDocCount(0));
 			writer.Close();
@@ -452,7 +454,7 @@
 		private IndexWriter NewWriter(Directory dir, bool create)
 		{
 			IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), create);
-			writer.SetMergePolicy(new LogDocMergePolicy());
+			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			return writer;
 		}
 		
@@ -460,7 +462,7 @@
 		{
 			for (int i = 0; i < numDocs; i++)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
@@ -470,7 +472,7 @@
 		{
 			for (int i = 0; i < numDocs; i++)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				doc.Add(new Field("content", "bbb", Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
@@ -525,45 +527,64 @@
 			Assert.AreEqual(3, writer.GetSegmentCount());
 			writer.Close();
 		}
-
-        // LUCENE-1270
-        [Test]
-        public void TestHangOnClose()
-        {
-            Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-            writer.SetMergePolicy(new LogByteSizeMergePolicy());
-            writer.SetMaxBufferedDocs(5);
-            writer.SetUseCompoundFile(false);
-            writer.SetMergeFactor(100);
-
-            Document doc = new Document();
-            doc.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
-            for (int i = 0; i < 60; i++)
-                writer.AddDocument(doc);
-            writer.SetMaxBufferedDocs(200);
-            Document doc2 = new Document();
-            doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
-            doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
-            doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
-            doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
-            for (int i = 0; i < 10; i++)
-                writer.AddDocument(doc2);
-            writer.Close();
-
-            Directory dir2 = new MockRAMDirectory();
-            writer = new IndexWriter(dir2, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-            LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
-            lmp.SetMinMergeMB(0.0001);
-            writer.SetMergePolicy(lmp);
-            writer.SetMergeFactor(4);
-            writer.SetUseCompoundFile(false);
-            writer.SetMergeScheduler(new SerialMergeScheduler());
-            writer.AddIndexesNoOptimize(new Directory[] { dir });
-            writer.Close();
-
-            dir.Close();
-            dir2.Close();
-        }
-    }
+		
+		// LUCENE-1270
+		[Test]
+		public virtual void  TestHangOnClose()
+		{
+			
+			Directory dir = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetMergePolicy(new LogByteSizeMergePolicy(writer));
+			writer.SetMaxBufferedDocs(5);
+			writer.SetUseCompoundFile(false);
+			writer.SetMergeFactor(100);
+			
+			Document doc = new Document();
+			doc.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			for (int i = 0; i < 60; i++)
+				writer.AddDocument(doc);
+			writer.SetMaxBufferedDocs(200);
+			Document doc2 = new Document();
+			doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
+			doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
+			doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
+			doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
+			for (int i = 0; i < 10; i++)
+				writer.AddDocument(doc2);
+			writer.Close();
+			
+			Directory dir2 = new MockRAMDirectory();
+			writer = new IndexWriter(dir2, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
+			lmp.SetMinMergeMB(0.0001);
+			writer.SetMergePolicy(lmp);
+			writer.SetMergeFactor(4);
+			writer.SetUseCompoundFile(false);
+			writer.SetMergeScheduler(new SerialMergeScheduler());
+			writer.AddIndexesNoOptimize(new Directory[]{dir});
+			writer.Close();
+			dir.Close();
+			dir2.Close();
+		}
+		
+		// LUCENE-1642: make sure CFS of destination indexwriter
+		// is respected when copying tail segments
+		[Test]
+		public virtual void  TestTargetCFS()
+		{
+			Directory dir = new RAMDirectory();
+			IndexWriter writer = NewWriter(dir, true);
+			writer.SetUseCompoundFile(false);
+			AddDocs(writer, 1);
+			writer.Close();
+			
+			Directory other = new RAMDirectory();
+			writer = NewWriter(other, true);
+			writer.SetUseCompoundFile(true);
+			writer.AddIndexesNoOptimize(new Directory[]{dir});
+			Assert.IsTrue(writer.NewestSegment().GetUseCompoundFile());
+			writer.Close();
+		}
+	}
 }
\ No newline at end of file
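
The recurring shape of these cases: build a main index and an auxiliary index, fold the auxiliary one in with AddIndexesNoOptimize, interleave updates or deletes, then verify document and term counts survive an Optimize. A minimal sketch of that flow using only the API calls exercised above:

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    class AddIndexesDemo
    {
        static void Main()
        {
            Directory dir = new RAMDirectory();   // main
            Directory aux = new RAMDirectory();   // auxiliary

            // Populate the auxiliary index.
            IndexWriter auxWriter = new IndexWriter(aux, true, new WhitespaceAnalyzer(), true);
            Document d = new Document();
            d.Add(new Field("content", "bbb", Field.Store.NO, Field.Index.ANALYZED));
            auxWriter.AddDocument(d);
            auxWriter.Close();

            // Merge it into the main index without a full optimize, then optimize.
            IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), true);
            writer.SetMergePolicy(new LogDocMergePolicy(writer));
            writer.AddIndexesNoOptimize(new Directory[] { aux });
            writer.Optimize();
            writer.Close();

            dir.Close();
            aux.Close();
        }
    }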

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestAtomicUpdate.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,40 +19,56 @@
 
 using NUnit.Framework;
 
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
-using Lucene.Net.Index;
 using Lucene.Net.QueryParsers;
 using Lucene.Net.Store;
 using Lucene.Net.Util;
-using Lucene.Net.Analysis;
 using Lucene.Net.Search;
-using Searchable = Lucene.Net.Search.Searchable;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
 	
 	[TestFixture]
-	public class TestAtomicUpdate : LuceneTestCase
+	public class TestAtomicUpdate:LuceneTestCase
 	{
 		private static readonly Analyzer ANALYZER = new SimpleAnalyzer();
-		private static readonly System.Random RANDOM = new System.Random();
-
-        public class MockIndexWriter : IndexWriter
-        {
-            public MockIndexWriter(Directory d, bool autoCommit, Analyzer a, bool create)
-                : base(d, autoCommit, a, create)
-            {
-            }
-
-            override protected bool TestPoint(string name)
-            {
-                if (RANDOM.Next(4) == 2)
-                    System.Threading.Thread.Sleep(1);
-                return true;
-            }
-        }
-
-		abstract public class TimedThread : SupportClass.ThreadClass
+		private System.Random RANDOM;
+		
+		public class MockIndexWriter:IndexWriter
+		{
+			private void  InitBlock(TestAtomicUpdate enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestAtomicUpdate enclosingInstance;
+			public TestAtomicUpdate Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			public MockIndexWriter(TestAtomicUpdate enclosingInstance, Directory dir, bool autoCommit, Analyzer a, bool create):base(dir, autoCommit, a, create)
+			{
+				InitBlock(enclosingInstance);
+			}
+			
+			public /*internal*/ override bool TestPoint(System.String name)
+			{
+				//      if (name.equals("startCommit")) {
+				if (Enclosing_Instance.RANDOM.Next(4) == 2)
+					System.Threading.Thread.Sleep(0);
+				return true;
+			}
+		}
+		
+		abstract public class TimedThread:SupportClass.ThreadClass
 		{
 			internal bool failed;
 			internal int count;
@@ -68,13 +84,13 @@
 			
 			override public void  Run()
 			{
-				long stopTime = (System.DateTime.Now.Ticks - 621355968000000000) / 10000 + 1000 * RUN_TIME_SEC;
+				long stopTime = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond + 1000 * RUN_TIME_SEC;
 				
 				count = 0;
 				
 				try
 				{
-					while ((System.DateTime.Now.Ticks - 621355968000000000) / 10000 < stopTime && !AnyErrors())
+					while (System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond < stopTime && !AnyErrors())
 					{
 						DoWork();
 						count++;
@@ -82,8 +98,8 @@
 				}
 				catch (System.Exception e)
 				{
-                    System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread.Name + ": exc");
-                    System.Console.Out.WriteLine(e.StackTrace);
+					System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": exc");
+					System.Console.Out.WriteLine(e.StackTrace);
 					failed = true;
 				}
 			}
@@ -97,10 +113,10 @@
 			}
 		}
 		
-		private class IndexerThread : TimedThread
+		private class IndexerThread:TimedThread
 		{
 			internal IndexWriter writer;
-			//new public int count;
+			new public int count;
 			
 			public IndexerThread(IndexWriter writer, TimedThread[] threads):base(threads)
 			{
@@ -120,7 +136,7 @@
 			}
 		}
 		
-		private class SearcherThread : TimedThread
+		private class SearcherThread:TimedThread
 		{
 			private Directory directory;
 			
@@ -146,10 +162,10 @@
 			
 			TimedThread[] threads = new TimedThread[4];
 			
-			IndexWriter writer = new MockIndexWriter(directory, true, ANALYZER, true);
-            writer.SetMaxBufferedDocs(7);
-            writer.SetMergeFactor(3);
-
+			IndexWriter writer = new MockIndexWriter(this, directory, true, ANALYZER, true);
+			writer.SetMaxBufferedDocs(7);
+			writer.SetMergeFactor(3);
+			
 			// Establish a base index of 100 docs:
 			for (int i = 0; i < 100; i++)
 			{
@@ -159,11 +175,11 @@
 				writer.AddDocument(d);
 			}
 			writer.Commit();
-
-            IndexReader r = IndexReader.Open(directory);
-            Assert.AreEqual(100, r.NumDocs());
-            r.Close();
-
+			
+			IndexReader r = IndexReader.Open(directory);
+			Assert.AreEqual(100, r.NumDocs());
+			r.Close();
+			
 			IndexerThread indexerThread = new IndexerThread(writer, threads);
 			threads[0] = indexerThread;
 			indexerThread.Start();
@@ -203,7 +219,7 @@
 		[Test]
 		public virtual void  TestAtomicUpdates()
 		{
-			
+			RANDOM = NewRandom();
 			Directory directory;
 			
 			// First in a RAM directory:
@@ -213,8 +229,8 @@
 			
 			// Second in an FSDirectory:
 			System.String tempDir = System.IO.Path.GetTempPath();
-			System.IO.FileInfo dirPath = new System.IO.FileInfo(tempDir + "\\" + "lucene.test.atomic");
-			directory = FSDirectory.GetDirectory(dirPath);
+			System.IO.FileInfo dirPath = new System.IO.FileInfo(System.IO.Path.Combine(tempDir, "lucene.test.atomic"));
+			directory = FSDirectory.Open(dirPath);
 			RunTest(directory);
 			directory.Close();
 			_TestUtil.RmDir(dirPath);

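Note: TimedThread.Run above bounds its work with a wall-clock deadline. DateTime.Now.Millisecond would only yield the 0-999 millisecond component of the current second, so the deadline has to come from total elapsed milliseconds, e.g. DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond. A minimal standalone sketch of that idiom follows; the TimedLoopSketch class and RunFor method are illustrative names, not part of this commit.

    using System;

    // Illustrative sketch of the wall-clock deadline loop used by
    // TimedThread.Run; names here are hypothetical.
    public static class TimedLoopSketch
    {
        public static int RunFor(int runTimeSec)
        {
            // Total milliseconds on the DateTime.Ticks scale; the
            // .Millisecond property would only give the 0-999 component
            // of the current second.
            long stopTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond
                + 1000L * runTimeSec;
            int count = 0;
            while (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond < stopTime)
            {
                count++; // the test's DoWork() call would run here
            }
            return count;
        }
    }

Environment.TickCount would serve equally well here for runs shorter than roughly 24.9 days, after which its int value wraps.
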
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestBackwardsCompatibility.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -22,11 +22,11 @@
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using TermQuery = Lucene.Net.Search.TermQuery;
-using Directory = Lucene.Net.Store.Directory;
-using FSDirectory = Lucene.Net.Store.FSDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using _TestUtil = Lucene.Net.Util._TestUtil;
 
@@ -38,23 +38,25 @@
 	against it, and add documents to it.*/
 	
 	[TestFixture]
-	public class TestBackwardsCompatibility : LuceneTestCase
+	public class TestBackwardsCompatibility:LuceneTestCase
 	{
 		
-		// Uncomment these cases & run them on an older Lucene version
-        // to generate an index to test backwards compatibility.
-        // Then cd to build/test/index.cfs and run "zip index.<VERSION>.cfs.zip *";
-        // cd to build/test/index.nocfs and run "zip index.<VERSION>.nocfs.zip *".
-        // Then move those 2 zip files to your trunk checkout and add them to the
-        // oldNames array.
+		// Uncomment these cases & run them on an older Lucene
+		// version, to generate an index to test backwards
+		// compatibility.  Then, cd to build/test/index.cfs and
+		// run "zip index.<VERSION>.cfs.zip *"; cd to
+		// build/test/index.nocfs and run "zip
+		// index.<VERSION>.nocfs.zip *".  Then move those 2 zip
+		// files to your trunk checkout and add them to the
+		// oldNames array.
 		
 		/*
 		public void testCreatePreLocklessCFS() throws IOException {
-		CreateIndex("index.cfs", true);
+		createIndex("index.cfs", true);
 		}
 		
 		public void testCreatePreLocklessNoCFS() throws IOException {
-		CreateIndex("index.nocfs", false);
+		createIndex("index.nocfs", false);
 		}
 		*/
 		
@@ -113,46 +115,35 @@
 			RmDir(dirName);
 		}
 		
-		internal readonly string[] oldNames = new string[] {
-            "19.cfs",    
-            "19.nocfs",    
-            "20.cfs",    
-            "20.nocfs",    
-            "21.cfs",    
-            "21.nocfs",    
-            "22.cfs",    
-            "22.nocfs",    
-            "23.cfs",    
-            "23.nocfs",    
-        };
-
-        [Test]
-        public void TestOptimizeOldIndex()
-        {
-            for (int i = 0; i < oldNames.Length; i++)
-            {
-                string dirName = @"Index\index." + oldNames[i];
-                Unzip(dirName, oldNames[i]);
-                string fullPath = FullDir(oldNames[i]);
-                Directory dir = FSDirectory.GetDirectory(fullPath);
-                IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-                w.Optimize();
-                w.Close();
-
-                _TestUtil.CheckIndex(dir);
-                dir.Close();
-                RmDir(oldNames[i]);
-            }
-        }
-
-        [Test]
-        public virtual void TestSearchOldIndex()
+		internal System.String[] oldNames = new System.String[]{"19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs", "21.nocfs", "22.cfs", "22.nocfs", "23.cfs", "23.nocfs", "24.cfs", "24.nocfs"};
+		
+		[Test]
+		public virtual void  TestOptimizeOldIndex()
+		{
+			for (int i = 0; i < oldNames.Length; i++)
+			{
+				System.String dirName = "src/test/org/apache/lucene/index/index." + oldNames[i];
+				Unzip(dirName, oldNames[i]);
+				System.String fullPath = FullDir(oldNames[i]);
+				Directory dir = FSDirectory.Open(new System.IO.FileInfo(fullPath));
+				IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+				w.Optimize();
+				w.Close();
+				
+				_TestUtil.CheckIndex(dir);
+				dir.Close();
+				RmDir(oldNames[i]);
+			}
+		}
+		
+		[Test]
+		public virtual void  TestSearchOldIndex()
 		{
 			for (int i = 0; i < oldNames.Length; i++)
 			{
-				System.String dirName = @"Index\index." + oldNames[i];
+				System.String dirName = "src/test/org/apache/lucene/index/index." + oldNames[i];
 				Unzip(dirName, oldNames[i]);
-				SearchIndex(oldNames[i], oldNames[i]);
+				searchIndex(oldNames[i], oldNames[i]);
 				RmDir(oldNames[i]);
 			}
 		}
@@ -162,7 +153,7 @@
 		{
 			for (int i = 0; i < oldNames.Length; i++)
 			{
-				System.String dirName = @"Index\index." + oldNames[i];
+				System.String dirName = "src/test/org/apache/lucene/index/index." + oldNames[i];
 				Unzip(dirName, oldNames[i]);
 				ChangeIndexNoAdds(oldNames[i], true);
 				RmDir(oldNames[i]);
@@ -178,7 +169,7 @@
 		{
 			for (int i = 0; i < oldNames.Length; i++)
 			{
-				System.String dirName = @"Index\index." + oldNames[i];
+				System.String dirName = "src/test/org/apache/lucene/index/index." + oldNames[i];
 				Unzip(dirName, oldNames[i]);
 				ChangeIndexWithAdds(oldNames[i], true);
 				RmDir(oldNames[i]);
@@ -188,92 +179,104 @@
 				RmDir(oldNames[i]);
 			}
 		}
-
-        private void TestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader)
-        {
-            int hitCount = hits.Length;
-            Assert.AreEqual(expectedCount, hitCount, "wrong number of hits");
-            for (int i = 0; i < hitCount; i++)
-            {
-                reader.Document(hits[i].doc);
-                reader.GetTermFreqVectors(hits[i].doc);
-            }
-        }
-
-        public virtual void SearchIndex(string dirName, string oldName)
+		
+		private void  TestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader)
+		{
+			int hitCount = hits.Length;
+			Assert.AreEqual(expectedCount, hitCount, "wrong number of hits");
+			for (int i = 0; i < hitCount; i++)
+			{
+				reader.Document(hits[i].doc);
+				reader.GetTermFreqVectors(hits[i].doc);
+			}
+		}
+		
+		public virtual void  searchIndex(System.String dirName, System.String oldName)
 		{
 			//QueryParser parser = new QueryParser("contents", new WhitespaceAnalyzer());
 			//Query query = parser.parse("handle:1");
 			
 			dirName = FullDir(dirName);
 			
-			Directory dir = FSDirectory.GetDirectory(dirName);
+			Directory dir = FSDirectory.Open(new System.IO.FileInfo(dirName));
 			IndexSearcher searcher = new IndexSearcher(dir);
-            IndexReader reader = searcher.GetIndexReader();
-
-            _TestUtil.CheckIndex(dir);
-
-            for (int i = 0; i < 35; i++)
-            {
-                if (!reader.IsDeleted(i))
-                {
-                    Document d = reader.Document(i);
-                    System.Collections.IList fields = d.GetFields();
-                    if (oldName.StartsWith("23."))
-                    {
-                        Assert.AreEqual(4, fields.Count);
-                        Field f = (Field)d.GetField("id");
-                        Assert.AreEqual("" + i, f.StringValue());
-
-                        f = (Field)d.GetField("utf8");
-                        Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue());
-
-                        f = (Field)d.GetField("autf8");
-                        Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue());
-
-                        f = (Field)d.GetField("content2");
-                        Assert.AreEqual("here is more content with aaa aaa aaa", f.StringValue());
-                    }
-                }
-                else
-                    // only ID 7 is deleted
-                    Assert.AreEqual(7, i);
-            }
-
-            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
-
-			// First document should be #21 since it's norm was increased:
+			IndexReader reader = searcher.GetIndexReader();
+			
+			_TestUtil.CheckIndex(dir);
+			
+			for (int i = 0; i < 35; i++)
+			{
+				if (!reader.IsDeleted(i))
+				{
+					Document d = reader.Document(i);
+					System.Collections.IList fields = d.GetFields();
+					if (!oldName.StartsWith("19.") && !oldName.StartsWith("20.") && !oldName.StartsWith("21.") && !oldName.StartsWith("22."))
+					{
+						
+						if (d.GetField("content3") == null)
+						{
+							Assert.AreEqual(5, fields.Count);
+							Field f = (Field) d.GetField("id");
+							Assert.AreEqual("" + i, f.StringValue());
+							
+							f = (Field) d.GetField("utf8");
+							Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue());
+							
+							f = (Field) d.GetField("autf8");
+							Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue());
+							
+							f = (Field) d.GetField("content2");
+							Assert.AreEqual("here is more content with aaa aaa aaa", f.StringValue());
+							
+							f = (Field) d.GetField("fie\u2C77ld");
+							Assert.AreEqual("field with non-ascii name", f.StringValue());
+						}
+					}
+				}
+				// Only ID 7 is deleted
+				else
+					Assert.AreEqual(7, i);
+			}
+			
+			ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+			
+			// First document should be #21 since its norm was
+			// increased:
 			Document d2 = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("21", d2.Get("id"), "didn't get the right document first");
 			
-            TestHits(hits, 34, searcher.GetIndexReader());
-
-            if (!oldName.StartsWith("19.") &&
-                !oldName.StartsWith("20.") &&
-                !oldName.StartsWith("21.") &&
-                !oldName.StartsWith("22."))
-            {
-                // Test on indices >= 2.3
-                hits = searcher.Search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).scoreDocs;
-                Assert.AreEqual(34, hits.Length);
-                hits = searcher.Search(new TermQuery(new Term("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).scoreDocs;
-                Assert.AreEqual(34, hits.Length);
-                hits = searcher.Search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).scoreDocs;
-                Assert.AreEqual(34, hits.Length);
-            }
+			TestHits(hits, 34, searcher.GetIndexReader());
+			
+			if (!oldName.StartsWith("19.") && !oldName.StartsWith("20.") && !oldName.StartsWith("21.") && !oldName.StartsWith("22."))
+			{
+				// Test on indices >= 2.3
+				hits = searcher.Search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).scoreDocs;
+				Assert.AreEqual(34, hits.Length);
+				hits = searcher.Search(new TermQuery(new Term("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).scoreDocs;
+				Assert.AreEqual(34, hits.Length);
+				hits = searcher.Search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).scoreDocs;
+				Assert.AreEqual(34, hits.Length);
+			}
 			
 			searcher.Close();
 			dir.Close();
 		}
 		
+		private int Compare(System.String name, System.String v)
+		{
+			int v0 = System.Int32.Parse(name.Substring(0, 2)); // first two chars of the name hold the index version, e.g. "24"
+			int v1 = System.Int32.Parse(v);
+			return v0 - v1;
+		}
+		
 		/* Open pre-lockless index, add docs, do a delete &
 		* setNorm, and search */
 		public virtual void  ChangeIndexWithAdds(System.String dirName, bool autoCommit)
 		{
-			
+			System.String origDirName = dirName;
 			dirName = FullDir(dirName);
 			
-			Directory dir = FSDirectory.GetDirectory(dirName);
+			Directory dir = FSDirectory.Open(new System.IO.FileInfo(dirName));
 			
 			// open writer
 			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
@@ -285,7 +288,16 @@
 			}
 			
 			// make sure writer sees right total -- writer seems not to know about deletes in .del?
-			Assert.AreEqual(45, writer.DocCount(), "wrong doc count");
+			int expected;
+			if (Compare(origDirName, "24") < 0)
+			{
+				expected = 45;
+			}
+			else
+			{
+				expected = 46;
+			}
+			Assert.AreEqual(expected, writer.DocCount(), "wrong doc count");
 			writer.Close();
 			
 			// make sure searching sees right # hits
@@ -293,7 +305,7 @@
 			ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
 			Document d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
-            TestHits(hits, 44, searcher.GetIndexReader());
+			TestHits(hits, 44, searcher.GetIndexReader());
 			searcher.Close();
 			
 			// make sure we can do delete & setNorm against this
@@ -311,7 +323,7 @@
 			Assert.AreEqual(43, hits.Length, "wrong number of hits");
 			d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
-            TestHits(hits, 43, searcher.GetIndexReader());
+			TestHits(hits, 43, searcher.GetIndexReader());
 			searcher.Close();
 			
 			// optimize
@@ -323,7 +335,7 @@
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
 			Assert.AreEqual(43, hits.Length, "wrong number of hits");
 			d = searcher.Doc(hits[0].doc);
-            TestHits(hits, 43, searcher.GetIndexReader());
+			TestHits(hits, 43, searcher.GetIndexReader());
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
 			searcher.Close();
 			
@@ -332,17 +344,17 @@
 		
 		/* Open pre-lockless index, do a delete &
 		* setNorm, and search */
-		public virtual void ChangeIndexNoAdds(System.String dirName, bool autoCommit)
+		public virtual void  ChangeIndexNoAdds(System.String dirName, bool autoCommit)
 		{
 			
 			dirName = FullDir(dirName);
 			
-			Directory dir = FSDirectory.GetDirectory(dirName);
+			Directory dir = FSDirectory.Open(new System.IO.FileInfo(dirName));
 			
 			// make sure searching sees right # hits
 			IndexSearcher searcher = new IndexSearcher(dir);
-            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
-            Assert.AreEqual(34, hits.Length, "wrong number of hits");
+			ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+			Assert.AreEqual(34, hits.Length, "wrong number of hits");
 			Document d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
 			searcher.Close();
@@ -358,12 +370,12 @@
 			
 			// make sure they "took":
 			searcher = new IndexSearcher(dir);
-            hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
-            Assert.AreEqual(33, hits.Length, "wrong number of hits");
-            d = searcher.Doc(hits[0].doc);
+			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+			Assert.AreEqual(33, hits.Length, "wrong number of hits");
+			d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
-            TestHits(hits, 33, searcher.GetIndexReader());
-            searcher.Close();
+			TestHits(hits, 33, searcher.GetIndexReader());
+			searcher.Close();
 			
 			// optimize
 			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
@@ -371,12 +383,12 @@
 			writer.Close();
 			
 			searcher = new IndexSearcher(dir);
-            hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
-            Assert.AreEqual(33, hits.Length, "wrong number of hits");
-            d = searcher.Doc(hits[0].doc);
-            Assert.AreEqual("22", d.Get("id"), "wrong first document");
-            TestHits(hits, 33, searcher.GetIndexReader());
-            searcher.Close();
+			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+			Assert.AreEqual(33, hits.Length, "wrong number of hits");
+			d = searcher.Doc(hits[0].doc);
+			Assert.AreEqual("22", d.Get("id"), "wrong first document");
+			TestHits(hits, 33, searcher.GetIndexReader());
+			searcher.Close();
 			
 			dir.Close();
 		}
@@ -388,10 +400,10 @@
 			
 			dirName = FullDir(dirName);
 			
-			Directory dir = FSDirectory.GetDirectory(dirName);
+			Directory dir = FSDirectory.Open(new System.IO.FileInfo(dirName));
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetUseCompoundFile(doCFS);
-            writer.SetMaxBufferedDocs(10);
+			writer.SetMaxBufferedDocs(10);
 			
 			for (int i = 0; i < 35; i++)
 			{
@@ -400,6 +412,13 @@
 			Assert.AreEqual(35, writer.DocCount(), "wrong doc count");
 			writer.Close();
 			
+			// open fresh writer so we get no prx file in the added segment
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetUseCompoundFile(doCFS);
+			writer.SetMaxBufferedDocs(10);
+			AddNoProxDoc(writer);
+			writer.Close();
+			
 			// Delete one doc so we get a .del file:
 			IndexReader reader = IndexReader.Open(dir);
 			Term searchTerm = new Term("id", "7");
@@ -412,7 +431,7 @@
 		}
 		
 		/* Verifies that the expected file names were produced */
-
+		
 		[Test]
 		public virtual void  TestExactFileNames()
 		{
@@ -425,7 +444,7 @@
 				
 				try
 				{
-					Directory dir = FSDirectory.GetDirectory(FullDir(outputDir));
+					Directory dir = FSDirectory.Open(new System.IO.FileInfo(FullDir(outputDir)));
 					
 					bool autoCommit = 0 == pass;
 					
@@ -435,7 +454,7 @@
 					{
 						AddDoc(writer, i);
 					}
-					Assert.AreEqual(35, writer.DocCount());
+					Assert.AreEqual(35, writer.DocCount(), "wrong doc count");
 					writer.Close();
 					
 					// Delete one doc so we get a .del file:
@@ -460,7 +479,7 @@
 					for (int i = 0; i < fieldInfos.Size(); i++)
 					{
 						FieldInfo fi = fieldInfos.FieldInfo(i);
-						if (fi.Name_ForNUnitTest.Equals("content"))
+						if (fi.name_ForNUnit.Equals("content"))
 						{
 							contentFieldIndex = i;
 							break;
@@ -473,10 +492,10 @@
 					System.String[] expected;
 					expected = new System.String[]{"_0.cfs", "_0_1.del", "_0_1.s" + contentFieldIndex, "segments_3", "segments.gen"};
 					
-					System.String[] actual = dir.List();
+					System.String[] actual = dir.ListAll();
 					System.Array.Sort(expected);
 					System.Array.Sort(actual);
-					if (!ArrayEquals(expected, actual))
+					if (!SupportClass.CollectionsHelper.Equals(expected, actual))
 					{
 						Assert.Fail("incorrect filenames in index: expected:\n    " + AsString(expected) + "\n  actual:\n    " + AsString(actual));
 					}
@@ -505,13 +524,26 @@
 		
 		private void  AddDoc(IndexWriter writer, int id)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
 			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
-            doc.Add(new Field("autf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            doc.Add(new Field("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            doc.Add(new Field("content2", "here is more content with aaa aaa aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            writer.AddDocument(doc);
+			doc.Add(new Field("autf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("content2", "here is more content with aaa aaa aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("fie\u2C77ld", "field with non-ascii name", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			writer.AddDocument(doc);
+		}
+		
+		private void  AddNoProxDoc(IndexWriter writer)
+		{
+			Document doc = new Document();
+			Field f = new Field("content3", "aaa", Field.Store.YES, Field.Index.ANALYZED);
+			f.SetOmitTf(true);
+			doc.Add(f);
+			f = new Field("content4", "aaa", Field.Store.YES, Field.Index.NO);
+			f.SetOmitTf(true);
+			doc.Add(f);
+			writer.AddDocument(doc);
 		}
 		
 		private void  RmDir(System.String dir)
@@ -524,20 +556,20 @@
 				tmpBool = System.IO.Directory.Exists(fileDir.FullName);
 			if (tmpBool)
 			{
-				System.String[] files = System.IO.Directory.GetFileSystemEntries(fileDir.FullName);
+				System.IO.FileInfo[] files = SupportClass.FileSupport.GetFiles(fileDir);
 				if (files != null)
 				{
 					for (int i = 0; i < files.Length; i++)
 					{
 						bool tmpBool2;
-						if (System.IO.File.Exists(files[i]))
+						if (System.IO.File.Exists(files[i].FullName))
 						{
-							System.IO.File.Delete(files[i]);
+							System.IO.File.Delete(files[i].FullName);
 							tmpBool2 = true;
 						}
-						else if (System.IO.Directory.Exists(files[i]))
+						else if (System.IO.Directory.Exists(files[i].FullName))
 						{
-							System.IO.Directory.Delete(files[i]);
+							System.IO.Directory.Delete(files[i].FullName);
 							tmpBool2 = true;
 						}
 						else
@@ -566,29 +598,5 @@
 		{
 			return new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), dirName)).FullName;
 		}
-
-		public static bool ArrayEquals(System.Array array1, System.Array array2)
-		{
-			bool result = false;
-			if ((array1 == null) && (array2 == null))
-				result = true;
-			else if ((array1 != null) && (array2 != null))
-			{
-				if (array1.Length == array2.Length)
-				{
-					int length = array1.Length;
-					result = true;
-					for (int index = 0; index < length; index++)
-					{
-						if (!(array1.GetValue(index).Equals(array2.GetValue(index))))
-						{
-							result = false;
-							break;
-						}
-					}
-				}
-			}
-			return result;
-		}
 	}
 }
\ No newline at end of file
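
Note: the expected-vs-actual file name check in TestExactFileNames now delegates to SupportClass.CollectionsHelper.Equals in place of the removed ArrayEquals helper. A minimal element-wise equivalent is sketched below; the ArrayEqualsSketch name is illustrative, and it is assumed (not verified here) that CollectionsHelper.Equals performs the same null-tolerant comparison.

    using System;

    // Sketch of the element-wise, null-tolerant comparison the removed
    // ArrayEquals helper performed; SupportClass.CollectionsHelper.Equals
    // is assumed to behave equivalently.
    public static class ArrayEqualsSketch
    {
        public static bool AreEqual(string[] a, string[] b)
        {
            if (a == null && b == null)
                return true;  // both null counted as equal in the old helper
            if (a == null || b == null || a.Length != b.Length)
                return false;
            for (int i = 0; i < a.Length; i++)
            {
                if (!a[i].Equals(b[i]))
                    return false;  // first mismatch ends the scan
            }
            return true;
        }
    }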