Posted to commits@lucenenet.apache.org by do...@apache.org on 2009/07/29 20:04:24 UTC

svn commit: r798995 [24/35] - in /incubator/lucene.net/trunk/C#/src: Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Analysis/Standard/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/QueryParser/ Lucene.Net/Search/ Lucene.Net/Search/Function/ Lucene.Net...

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestBackwardsCompatibility.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs Wed Jul 29 18:04:12 2009
@@ -19,15 +19,16 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Directory = Lucene.Net.Store.Directory;
-using FSDirectory = Lucene.Net.Store.FSDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using TermQuery = Lucene.Net.Search.TermQuery;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
@@ -40,16 +41,20 @@
 	public class TestBackwardsCompatibility : LuceneTestCase
 	{
 		
-		// Uncomment these cases & run in a pre-lockless checkout
-		// to create indices:
+		// Uncomment these cases & run them on an older Lucene version
+        // to generate an index to test backwards compatibility.
+        // Then cd to build/test/index.cfs and run "zip index.<VERSION>.cfs.zip *";
+        // cd to build/test/index.nocfs and run "zip index.<VERSION>.nocfs.zip *".
+        // Then move those 2 zip files to your trunk checkout and add them to the
+        // oldNames array.
 		
 		/*
 		public void testCreatePreLocklessCFS() throws IOException {
-		CreateIndex("src/test/org/apache/lucene/index/index.prelockless.cfs", true);
+		CreateIndex("index.cfs", true);
 		}
 		
 		public void testCreatePreLocklessNoCFS() throws IOException {
-		CreateIndex("src/test/org/apache/lucene/index/index.prelockless.nocfs", false);
+		CreateIndex("index.nocfs", false);
 		}
 		*/
 		
@@ -108,16 +113,46 @@
 			RmDir(dirName);
 		}
 		
-		internal System.String[] oldNames = new System.String[]{"prelockless.cfs", "prelockless.nocfs", "presharedstores.cfs", "presharedstores.nocfs"};
-		
-		[Test]
-		public virtual void  TestSearchOldIndex()
+		internal readonly string[] oldNames = new string[] {
+            "19.cfs",    
+            "19.nocfs",    
+            "20.cfs",    
+            "20.nocfs",    
+            "21.cfs",    
+            "21.nocfs",    
+            "22.cfs",    
+            "22.nocfs",    
+            "23.cfs",    
+            "23.nocfs",    
+        };
+
+        [Test]
+        public void TestOptimizeOldIndex()
+        {
+            for (int i = 0; i < oldNames.Length; i++)
+            {
+                string dirName = @"Index\index." + oldNames[i];
+                Unzip(dirName, oldNames[i]);
+                string fullPath = FullDir(oldNames[i]);
+                Directory dir = FSDirectory.GetDirectory(fullPath);
+                IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+                w.Optimize();
+                w.Close();
+
+                _TestUtil.CheckIndex(dir);
+                dir.Close();
+                RmDir(oldNames[i]);
+            }
+        }
+
+        [Test]
+        public virtual void TestSearchOldIndex()
 		{
 			for (int i = 0; i < oldNames.Length; i++)
 			{
 				System.String dirName = @"Index\index." + oldNames[i];
 				Unzip(dirName, oldNames[i]);
-				SearchIndex(oldNames[i]);
+				SearchIndex(oldNames[i], oldNames[i]);
 				RmDir(oldNames[i]);
 			}
 		}
@@ -153,8 +188,19 @@
 				RmDir(oldNames[i]);
 			}
 		}
-		
-		public virtual void  SearchIndex(System.String dirName)
+
+        private void TestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader)
+        {
+            int hitCount = hits.Length;
+            Assert.AreEqual(expectedCount, hitCount, "wrong number of hits");
+            for (int i = 0; i < hitCount; i++)
+            {
+                reader.Document(hits[i].doc);
+                reader.GetTermFreqVectors(hits[i].doc);
+            }
+        }
+
+        public virtual void SearchIndex(string dirName, string oldName)
 		{
 			//QueryParser parser = new QueryParser("contents", new WhitespaceAnalyzer());
 			//Query query = parser.parse("handle:1");
@@ -163,13 +209,58 @@
 			
 			Directory dir = FSDirectory.GetDirectory(dirName);
 			IndexSearcher searcher = new IndexSearcher(dir);
-			
-			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
-			Assert.AreEqual(34, hits.Length());
-			Document d = hits.Doc(0);
-			
+            IndexReader reader = searcher.GetIndexReader();
+
+            _TestUtil.CheckIndex(dir);
+
+            for (int i = 0; i < 35; i++)
+            {
+                if (!reader.IsDeleted(i))
+                {
+                    Document d = reader.Document(i);
+                    System.Collections.IList fields = d.GetFields();
+                    if (oldName.StartsWith("23."))
+                    {
+                        Assert.AreEqual(4, fields.Count);
+                        Field f = (Field)d.GetField("id");
+                        Assert.AreEqual("" + i, f.StringValue());
+
+                        f = (Field)d.GetField("utf8");
+                        Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue());
+
+                        f = (Field)d.GetField("autf8");
+                        Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue());
+
+                        f = (Field)d.GetField("content2");
+                        Assert.AreEqual("here is more content with aaa aaa aaa", f.StringValue());
+                    }
+                }
+                else
+                    // only ID 7 is deleted
+                    Assert.AreEqual(7, i);
+            }
+
+            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+
 			// First document should be #21 since its norm was increased:
-			Assert.AreEqual("21", d.Get("id"), "didn't get the right document first");
+			Document d2 = searcher.Doc(hits[0].doc);
+			Assert.AreEqual("21", d2.Get("id"), "didn't get the right document first");
+			
+            TestHits(hits, 34, searcher.GetIndexReader());
+
+            if (!oldName.StartsWith("19.") &&
+                !oldName.StartsWith("20.") &&
+                !oldName.StartsWith("21.") &&
+                !oldName.StartsWith("22."))
+            {
+                // Test on indices >= 2.3
+                hits = searcher.Search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).scoreDocs;
+                Assert.AreEqual(34, hits.Length);
+                hits = searcher.Search(new TermQuery(new Term("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).scoreDocs;
+                Assert.AreEqual(34, hits.Length);
+                hits = searcher.Search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).scoreDocs;
+                Assert.AreEqual(34, hits.Length);
+            }
 			
 			searcher.Close();
 			dir.Close();
@@ -199,10 +290,10 @@
 			
 			// make sure searching sees right # hits
 			IndexSearcher searcher = new IndexSearcher(dir);
-			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
-			Assert.AreEqual(44, hits.Length(), "wrong number of hits");
-			Document d = hits.Doc(0);
+			ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+			Document d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
+            TestHits(hits, 44, searcher.GetIndexReader());
 			searcher.Close();
 			
 			// make sure we can do delete & setNorm against this
@@ -216,10 +307,11 @@
 			
 			// make sure they "took":
 			searcher = new IndexSearcher(dir);
-			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
-			Assert.AreEqual(43, hits.Length(), "wrong number of hits");
-			d = hits.Doc(0);
+			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+			Assert.AreEqual(43, hits.Length, "wrong number of hits");
+			d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
+            TestHits(hits, 43, searcher.GetIndexReader());
 			searcher.Close();
 			
 			// optimize
@@ -228,9 +320,10 @@
 			writer.Close();
 			
 			searcher = new IndexSearcher(dir);
-			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
-			Assert.AreEqual(43, hits.Length(), "wrong number of hits");
-			d = hits.Doc(0);
+			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+			Assert.AreEqual(43, hits.Length, "wrong number of hits");
+			d = searcher.Doc(hits[0].doc);
+            TestHits(hits, 43, searcher.GetIndexReader());
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
 			searcher.Close();
 			
@@ -248,9 +341,9 @@
 			
 			// make sure searching sees right # hits
 			IndexSearcher searcher = new IndexSearcher(dir);
-			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
-			Assert.AreEqual(34, hits.Length(), "wrong number of hits");
-			Document d = hits.Doc(0);
+            ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+            Assert.AreEqual(34, hits.Length, "wrong number of hits");
+			Document d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
 			searcher.Close();
 			
@@ -265,11 +358,12 @@
 			
 			// make sure they "took":
 			searcher = new IndexSearcher(dir);
-			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
-			Assert.AreEqual(33, hits.Length(), "wrong number of hits");
-			d = hits.Doc(0);
+            hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+            Assert.AreEqual(33, hits.Length, "wrong number of hits");
+            d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
-			searcher.Close();
+            TestHits(hits, 33, searcher.GetIndexReader());
+            searcher.Close();
 			
 			// optimize
 			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
@@ -277,11 +371,12 @@
 			writer.Close();
 			
 			searcher = new IndexSearcher(dir);
-			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
-			Assert.AreEqual(33, hits.Length(), "wrong number of hits");
-			d = hits.Doc(0);
-			Assert.AreEqual("22", d.Get("id"), "wrong first document");
-			searcher.Close();
+            hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
+            Assert.AreEqual(33, hits.Length, "wrong number of hits");
+            d = searcher.Doc(hits[0].doc);
+            Assert.AreEqual("22", d.Get("id"), "wrong first document");
+            TestHits(hits, 33, searcher.GetIndexReader());
+            searcher.Close();
 			
 			dir.Close();
 		}
@@ -294,8 +389,9 @@
 			dirName = FullDir(dirName);
 			
 			Directory dir = FSDirectory.GetDirectory(dirName);
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetUseCompoundFile(doCFS);
+            writer.SetMaxBufferedDocs(10);
 			
 			for (int i = 0; i < 35; i++)
 			{
@@ -335,7 +431,6 @@
 					
 					IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
 					writer.SetRAMBufferSizeMB(16.0);
-					//IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 					for (int i = 0; i < 35; i++)
 					{
 						AddDoc(writer, i);
@@ -376,10 +471,7 @@
 					
 					// Now verify file names:
 					System.String[] expected;
-					expected = new System.String[]{"_0.cfs", "_0_1.del", "_0_1.s" + contentFieldIndex, "segments_4", "segments.gen"};
-					
-					if (!autoCommit)
-						expected[3] = "segments_3";
+					expected = new System.String[]{"_0.cfs", "_0_1.del", "_0_1.s" + contentFieldIndex, "segments_3", "segments.gen"};
 					
 					System.String[] actual = dir.List();
 					System.Array.Sort(expected);
@@ -414,9 +506,12 @@
 		private void  AddDoc(IndexWriter writer, int id)
 		{
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
-			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.UN_TOKENIZED));
-			writer.AddDocument(doc);
+			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
+			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
+            doc.Add(new Field("autf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+            doc.Add(new Field("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+            doc.Add(new Field("content2", "here is more content with aaa aaa aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+            writer.AddDocument(doc);
 		}
 		
 		private void  RmDir(System.String dir)
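
The recurring change in this file (and throughout this commit) is the migration off the removed Hits API onto TopDocs/ScoreDoc. A minimal sketch of the pattern, assuming an already-built index in dir and the 2.4-era Lucene.Net API used in this diff:

    IndexSearcher searcher = new IndexSearcher(dir);
    Query query = new TermQuery(new Term("content", "aaa"));
    // Old, removed API:
    //   Hits hits = searcher.Search(query);
    //   int count = hits.Length();
    //   Document d = hits.Doc(0);
    // New API: explicit null filter and a result cap (top 1000 here):
    ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
    int count = hits.Length;                 // plain array length replaces Hits.Length()
    Document d = searcher.Doc(hits[0].doc);  // stored fields fetched via the searcher
    searcher.Close();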

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestByteSlices.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestByteSlices.cs?rev=798995&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestByteSlices.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestByteSlices.cs Wed Jul 29 18:04:12 2009
@@ -0,0 +1,132 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using NUnit.Framework;
+
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Index
+{
+    public class TestByteSlices : LuceneTestCase
+    {
+
+        private class ByteBlockAllocator : ByteBlockPool.Allocator
+        {
+            System.Collections.Generic.List<byte[]> freeByteBlocks = new System.Collections.Generic.List<byte[]>();
+
+            /* Allocate another byte[] from the shared pool */
+            override public byte[] GetByteBlock(bool trackAllocations)
+            {
+                lock (this)
+                {
+                    int size = freeByteBlocks.Count;
+                    byte[] b;
+                    if (0 == size)
+                        b = new byte[DocumentsWriter.BYTE_BLOCK_SIZE_For_NUnit_Test];
+                    else
+                    {
+                        b = freeByteBlocks[size - 1];
+                        freeByteBlocks.RemoveAt(size - 1);
+                    }
+                    return b;
+                }
+            }
+
+            /* Return a byte[] to the pool */
+            override public void RecycleByteBlocks(byte[][] blocks, int start, int end)
+            {
+                lock (this)
+                {
+                    for (int i = start; i < end; i++)
+                        freeByteBlocks.Add(blocks[i]);
+                }
+            }
+        }
+
+        [Test]
+        public void TestBasic()
+        {
+            ByteBlockPool pool = new ByteBlockPool(new ByteBlockAllocator(), false);
+
+            int NUM_STREAM = 25;
+
+            ByteSliceWriter writer = new ByteSliceWriter(pool);
+
+            int[] starts = new int[NUM_STREAM];
+            int[] uptos = new int[NUM_STREAM];
+            int[] counters = new int[NUM_STREAM];
+
+            System.Random r = new System.Random(1);
+
+            ByteSliceReader reader = new ByteSliceReader();
+
+            for (int ti = 0; ti < 100; ti++)
+            {
+
+                for (int stream = 0; stream < NUM_STREAM; stream++)
+                {
+                    starts[stream] = -1;
+                    counters[stream] = 0;
+                }
+
+                bool debug = false;
+
+                for (int iter = 0; iter < 10000; iter++)
+                {
+                    int stream = r.Next(NUM_STREAM);
+                    if (debug)
+                        System.Console.WriteLine("write stream=" + stream);
+
+                    if (starts[stream] == -1)
+                    {
+                        int spot = pool.NewSlice(ByteBlockPool.FIRST_LEVEL_SIZE_For_NUnit_Test);
+                        starts[stream] = uptos[stream] = spot + pool.byteOffset;
+                        if (debug)
+                            System.Console.WriteLine("  init to " + starts[stream]);
+                    }
+
+                    writer.Init(uptos[stream]);
+                    int numValue = r.Next(20);
+                    for (int j = 0; j < numValue; j++)
+                    {
+                        if (debug)
+                            System.Console.WriteLine("    write " + (counters[stream] + j));
+                        writer.WriteVInt(counters[stream] + j);
+                        //writer.writeVInt(ti);
+                    }
+                    counters[stream] += numValue;
+                    uptos[stream] = writer.GetAddress();
+                    if (debug)
+                        System.Console.WriteLine("    addr now " + uptos[stream]);
+                }
+
+                for (int stream = 0; stream < NUM_STREAM; stream++)
+                {
+                    if (debug)
+                        System.Console.WriteLine("  stream=" + stream + " count=" + counters[stream]);
+
+                    if (starts[stream] != uptos[stream])
+                    {
+                        reader.Init(pool, starts[stream], uptos[stream]);
+                        for (int j = 0; j < counters[stream]; j++)
+                            Assert.AreEqual(j, reader.ReadVInt());
+                        //assertEquals(ti, reader.readVInt());
+                    }
+                }
+
+                pool.Reset();
+            }
+        }
+    }
+}
\ No newline at end of file
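
TestByteSlices above drives the byte-slice layer behind the in-memory indexer. Stripped of the random interleaving, the write-then-read round trip it checks reduces to this sketch (names as in the test; ByteBlockAllocator is the test's own nested allocator class):

    ByteBlockPool pool = new ByteBlockPool(new ByteBlockAllocator(), false);
    ByteSliceWriter writer = new ByteSliceWriter(pool);
    // Allocate the first slice and remember its absolute start address:
    int start = pool.NewSlice(ByteBlockPool.FIRST_LEVEL_SIZE_For_NUnit_Test) + pool.byteOffset;
    writer.Init(start);
    for (int j = 0; j < 5; j++)
        writer.WriteVInt(j);             // append variable-length ints to the slice chain
    int upto = writer.GetAddress();      // address just past the last byte written

    ByteSliceReader reader = new ByteSliceReader();
    reader.Init(pool, start, upto);
    for (int j = 0; j < 5; j++)
        Assert.AreEqual(j, reader.ReadVInt());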

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestCheckIndex.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestCheckIndex.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestCheckIndex.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestCheckIndex.cs Wed Jul 29 18:04:12 2009
@@ -36,10 +36,10 @@
 		public virtual void  TestDeletedDocs()
 		{
 			MockRAMDirectory dir = new MockRAMDirectory();
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(2);
 			Document doc = new Document();
-			doc.Add(new Field("field", "aaa", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
 			for (int i = 0; i < 19; i++)
 			{
 				writer.AddDocument(doc);
@@ -48,11 +48,21 @@
 			IndexReader reader = IndexReader.Open(dir);
 			reader.DeleteDocument(5);
 			reader.Close();
-			
-			CheckIndex.out_Renamed = new System.IO.StringWriter();
-			bool condition = CheckIndex.Check(dir, false);
-			String message = CheckIndex.out_Renamed.ToString();
-			Assert.IsTrue(condition, message);
+
+            System.IO.StringWriter sw = new System.IO.StringWriter();
+            CheckIndex checker = new CheckIndex(dir);
+            checker.SetInfoStream(sw);
+            CheckIndex.Status indexStatus = checker.CheckIndex_Renamed();
+            if (!indexStatus.clean)
+            {
+                System.Console.WriteLine("CheckIndex failed");
+                System.Console.WriteLine(sw.ToString());
+                Assert.Fail();
+            }
+            System.Collections.Generic.List<object> onlySegments = new System.Collections.Generic.List<object>();
+            onlySegments.Add("_0");
+
+            Assert.IsTrue(checker.CheckIndex_Renamed(onlySegments).clean);
 		}
 	}
 }
\ No newline at end of file
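
The rewrite above tracks CheckIndex's move from a static Check(dir, bool) call to an instance API returning a Status object (CheckIndex_Renamed is the C# port's name for Java's checkIndex()). The usage pattern, as in the new test:

    CheckIndex checker = new CheckIndex(dir);
    System.IO.StringWriter sw = new System.IO.StringWriter();
    checker.SetInfoStream(sw);                        // collect diagnostics here
    CheckIndex.Status status = checker.CheckIndex_Renamed();
    if (!status.clean)
        System.Console.WriteLine(sw.ToString());      // dump details only on failure
    // An overload limits the check to specific segments, e.g. "_0":
    // bool clean = checker.CheckIndex_Renamed(onlySegments).clean;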

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestCloseableThreadLocal.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestCloseableThreadLocal.cs?rev=798995&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestCloseableThreadLocal.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestCloseableThreadLocal.cs Wed Jul 29 18:04:12 2009
@@ -0,0 +1,29 @@
+using NUnit.Framework;
+
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using CloseableThreadLocal = Lucene.Net.Util.CloseableThreadLocal;
+
+namespace Lucene.Net.Index
+{
+    [TestFixture]
+    public class TestCloseableThreadLocal
+    {
+        public const string TEST_VALUE = "initvaluetest";
+
+        [Test]
+        public void TestInitValue()
+        {
+            InitValueThreadLocal tl = new InitValueThreadLocal();
+            string str = (string)tl.Get();
+            Assert.AreEqual(TEST_VALUE, str);
+        }
+
+        public class InitValueThreadLocal : CloseableThreadLocal
+        {
+            override protected object InitialValue()
+            {
+                return TEST_VALUE;
+            }
+        }
+    }
+}
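
CloseableThreadLocal, exercised above, hands each thread its own lazily created value: Get() returns whatever the InitialValue() override produces the first time the calling thread asks. A minimal sketch based on the test (the subclass name here is illustrative):

    public class GreetingThreadLocal : Lucene.Net.Util.CloseableThreadLocal
    {
        protected override object InitialValue()
        {
            return "hello";        // computed once per thread, on first Get()
        }
    }

    // usage:
    GreetingThreadLocal tl = new GreetingThreadLocal();
    string s = (string)tl.Get();   // "hello"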

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestCompoundFile.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs Wed Jul 29 18:04:12 2009
@@ -672,4 +672,4 @@
 			}
 		}
 	}
-}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestConcurrentMergeScheduler.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestConcurrentMergeScheduler.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestConcurrentMergeScheduler.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestConcurrentMergeScheduler.cs Wed Jul 29 18:04:12 2009
@@ -73,12 +73,12 @@
 			FailOnlyOnFlush failure = new FailOnlyOnFlush();
 			directory.FailOn(failure);
 			
-			IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
+			IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
 			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
 			writer.SetMergeScheduler(cms);
 			writer.SetMaxBufferedDocs(2);
 			Document doc = new Document();
-			Field idField = new Field("id", "", Field.Store.YES, Field.Index.UN_TOKENIZED);
+			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
 			doc.Add(idField);
 			for (int i = 0; i < 10; i++)
 			{
@@ -114,10 +114,9 @@
 		[Test]
 		public virtual void  TestDeleteMerging()
 		{
-			
 			RAMDirectory directory = new MockRAMDirectory();
 			
-			IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
+			IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
 			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
 			writer.SetMergeScheduler(cms);
 			
@@ -130,7 +129,7 @@
 			mp.SetMinMergeDocs(1000);
 			
 			Document doc = new Document();
-			Field idField = new Field("id", "", Field.Store.YES, Field.Index.UN_TOKENIZED);
+			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
 			doc.Add(idField);
 			for (int i = 0; i < 10; i++)
 			{
@@ -179,7 +178,7 @@
 					for (int j = 0; j < 21; j++)
 					{
 						Document doc = new Document();
-						doc.Add(new Field("content", "a b c", Field.Store.NO, Field.Index.TOKENIZED));
+						doc.Add(new Field("content", "a b c", Field.Store.NO, Field.Index.ANALYZED));
 						writer.AddDocument(doc);
 					}
 					
@@ -202,7 +201,7 @@
 			RAMDirectory directory = new MockRAMDirectory();
 			
 			Document doc = new Document();
-			Field idField = new Field("id", "", Field.Store.YES, Field.Index.UN_TOKENIZED);
+			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
 			doc.Add(idField);
 			
 			for (int pass = 0; pass < 2; pass++)

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestCrash.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestCrash.cs?rev=798995&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestCrash.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestCrash.cs Wed Jul 29 18:04:12 2009
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using NUnit.Framework;
+
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using NoLockFactory = Lucene.Net.Store.NoLockFactory;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+
+namespace Lucene.Net.Index
+{
+    [TestFixture]
+    public class TestCrash : LuceneTestCase
+    {
+
+        private IndexWriter InitIndex()
+        {
+            return InitIndex(new MockRAMDirectory());
+        }
+
+        private IndexWriter InitIndex(MockRAMDirectory dir)
+        {
+            dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
+
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
+            //writer.setMaxBufferedDocs(2);
+            writer.SetMaxBufferedDocs(10);
+            ((ConcurrentMergeScheduler)writer.GetMergeScheduler()).SetSuppressExceptions_ForNUnitTest();
+
+            Document doc = new Document();
+            doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
+            doc.Add(new Field("id", "0", Field.Store.YES, Field.Index.ANALYZED));
+            for (int i = 0; i < 157; i++)
+                writer.AddDocument(doc);
+
+            return writer;
+        }
+
+        private void Crash(IndexWriter writer)
+        {
+            MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
+            ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler)writer.GetMergeScheduler();
+            dir.Crash();
+            cms.Sync();
+            dir.ClearCrash();
+        }
+
+        [Test]
+        public void TestCrashWhileIndexing()
+        {
+            IndexWriter writer = InitIndex();
+            MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
+            Crash(writer);
+            IndexReader reader = IndexReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs() < 157);
+        }
+
+        [Test]
+        public void TestWriterAfterCrash()
+        {
+            IndexWriter writer = InitIndex();
+            MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
+            dir.SetPreventDoubleWrite(false);
+            Crash(writer);
+            writer = InitIndex(dir);
+            writer.Close();
+
+            IndexReader reader = IndexReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs() < 314);
+        }
+
+        [Test]
+        public void TestCrashAfterReopen()
+        {
+            IndexWriter writer = InitIndex();
+            MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
+            writer.Close();
+            writer = InitIndex(dir);
+            Assert.AreEqual(314, writer.DocCount());
+            Crash(writer);
+
+            /*
+            System.out.println("\n\nTEST: open reader");
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " +
+            dir.fileLength(l[i]) + " bytes");
+            */
+
+            IndexReader reader = IndexReader.Open(dir);
+            Assert.IsTrue(reader.NumDocs() >= 157);
+        }
+
+        [Test]
+        public void TestCrashAfterClose()
+        {
+
+            IndexWriter writer = InitIndex();
+            MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
+
+            writer.Close();
+            dir.Crash();
+
+            /*
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
+            */
+
+            IndexReader reader = IndexReader.Open(dir);
+            Assert.AreEqual(157, reader.NumDocs());
+        }
+
+        [Test]
+        public void TestCrashAfterCloseNoWait()
+        {
+
+            IndexWriter writer = InitIndex();
+            MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
+
+            writer.Close(false);
+
+            dir.Crash();
+
+            /*
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
+            */
+            IndexReader reader = IndexReader.Open(dir);
+            Assert.AreEqual(157, reader.NumDocs());
+        }
+
+        [Test]
+        public void TestCrashReaderDeletes()
+        {
+
+            IndexWriter writer = InitIndex();
+            MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
+
+            writer.Close(false);
+            IndexReader reader = IndexReader.Open(dir);
+            reader.DeleteDocument(3);
+
+            dir.Crash();
+
+            /*
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
+            */
+            reader = IndexReader.Open(dir);
+            Assert.AreEqual(157, reader.NumDocs());
+        }
+
+        [Test]
+        public void TestCrashReaderDeletesAfterClose()
+        {
+
+            IndexWriter writer = InitIndex();
+            MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
+
+            writer.Close(false);
+            IndexReader reader = IndexReader.Open(dir);
+            reader.DeleteDocument(3);
+            reader.Close();
+
+            dir.Crash();
+
+            /*
+            String[] l = dir.list();
+            Arrays.sort(l);
+            for(int i=0;i<l.length;i++)
+              System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
+            */
+            reader = IndexReader.Open(dir);
+            Assert.AreEqual(156, reader.NumDocs());
+        }
+    }
+}
\ No newline at end of file
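
The crash tests lean on MockRAMDirectory's fault injection: Crash() simulates a machine failure by discarding un-synced file state, and each test then re-opens a reader to assert the index still reflects the last successful commit. The core sequence, as in Crash(writer) above:

    MockRAMDirectory dir = (MockRAMDirectory)writer.GetDirectory();
    ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler)writer.GetMergeScheduler();
    dir.Crash();         // simulate power loss: un-synced writes are lost
    cms.Sync();          // let in-flight background merges wind down
    dir.ClearCrash();    // re-enable normal I/O on the mock directory

    IndexReader reader = IndexReader.Open(dir);   // must open cleanly on the last commit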

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDeletionPolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDeletionPolicy.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDeletionPolicy.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDeletionPolicy.cs Wed Jul 29 18:04:12 2009
@@ -19,15 +19,15 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Directory = Lucene.Net.Store.Directory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
+using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using TermQuery = Lucene.Net.Search.TermQuery;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
@@ -42,12 +42,24 @@
 	{
 		private void  VerifyCommitOrder(System.Collections.IList commits)
 		{
-			long last = SegmentInfos.GenerationFromSegmentsFileName(((IndexCommitPoint) commits[0]).GetSegmentsFileName());
+            IndexCommit firstCommit = (IndexCommit)commits[0];
+			long last = SegmentInfos.GenerationFromSegmentsFileName(firstCommit.GetSegmentsFileName());
+            Assert.AreEqual(last, firstCommit.GetGeneration());
+            long lastVersion = firstCommit.GetVersion();
+            long lastTimestamp = firstCommit.GetTimestamp();
 			for (int i = 1; i < commits.Count; i++)
 			{
-				long now = SegmentInfos.GenerationFromSegmentsFileName(((IndexCommitPoint) commits[i]).GetSegmentsFileName());
-				Assert.IsTrue(now > last, "SegmentInfos commits are out-of-order");
+                IndexCommit commit = (IndexCommit)commits[i];
+                long now = SegmentInfos.GenerationFromSegmentsFileName(commit.GetSegmentsFileName());
+                long nowVersion = commit.GetVersion();
+                long nowTimestamp = commit.GetTimestamp();
+                Assert.IsTrue(now > last, "SegmentInfos commits are out-of-order");
+                Assert.IsTrue(nowVersion > lastVersion, "SegmentInfos versions are out-of-order");
+                Assert.IsTrue(nowTimestamp >= lastTimestamp, "SegmentInfos timestamps are out-of-order: now=" + nowTimestamp + " vs last=" + lastTimestamp);
+                Assert.AreEqual(now, commit.GetGeneration());
 				last = now;
+                lastVersion = nowVersion;
+                lastTimestamp = nowTimestamp;
 			}
 		}
 		
@@ -68,17 +80,21 @@
 				{
 					return enclosingInstance;
 				}
-				
 			}
 			internal int numOnInit;
 			internal int numOnCommit;
-			public virtual void  OnInit(System.Collections.IList commits)
+            internal Directory dir;
+			public virtual void  OnInit(System.Collections.Generic.List<IndexCommitPoint> commits)
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				numOnInit++;
 			}
-			public virtual void  OnCommit(System.Collections.IList commits)
+            public virtual void OnCommit(System.Collections.Generic.List<IndexCommitPoint> commits)
 			{
+                IndexCommit lastCommit = (IndexCommit) commits[commits.Count - 1];
+                IndexReader r = IndexReader.Open(dir);
+                Assert.AreEqual(r.IsOptimized(), lastCommit.IsOptimized(), "lastCommit.IsOptimized()=" + lastCommit.IsOptimized() + " vs IndexReader.IsOptimized()=" + r.IsOptimized());
+                r.Close();
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				numOnCommit++;
 			}
@@ -108,25 +124,27 @@
 			}
 			internal int numOnInit;
 			internal int numOnCommit;
-			public virtual void  OnInit(System.Collections.IList commits)
+            public virtual void OnInit(System.Collections.Generic.List<IndexCommitPoint> commits)
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				numOnInit++;
 				// On init, delete all commit points:
-				System.Collections.IEnumerator it = commits.GetEnumerator();
+				System.Collections.Generic.IEnumerator<IndexCommitPoint> it = commits.GetEnumerator();
 				while (it.MoveNext())
 				{
-					((IndexCommitPoint) it.Current).Delete();
+                    IndexCommit commit = (IndexCommit)it.Current;
+                    commit.Delete();
+                    Assert.IsTrue(commit.IsDeleted());
 				}
 			}
-			public virtual void  OnCommit(System.Collections.IList commits)
+            public virtual void OnCommit(System.Collections.Generic.List<IndexCommitPoint> commits)
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				int size = commits.Count;
 				// Delete all but last one:
 				for (int i = 0; i < size - 1; i++)
 				{
-					((IndexCommitPoint) commits[i]).Delete();
+					((IndexCommit) commits[i]).Delete();
 				}
 				numOnCommit++;
 			}
@@ -158,16 +176,16 @@
 				InitBlock(enclosingInstance);
 				this.numToKeep = numToKeep;
 			}
-			
-			public virtual void  OnInit(System.Collections.IList commits)
+
+            public virtual void OnInit(System.Collections.Generic.List<IndexCommitPoint> commits)
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				numOnInit++;
 				// do no deletions on init
 				DoDeletes(commits, false);
 			}
-			
-			public virtual void  OnCommit(System.Collections.IList commits)
+
+            public virtual void OnCommit(System.Collections.Generic.List<IndexCommitPoint> commits)
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				DoDeletes(commits, true);
@@ -180,7 +198,7 @@
 				// commit:
 				if (isCommit)
 				{
-					System.String fileName = ((IndexCommitPoint) commits[commits.Count - 1]).GetSegmentsFileName();
+					System.String fileName = ((IndexCommit) commits[commits.Count - 1]).GetSegmentsFileName();
 					if (seen.Contains(fileName))
 					{
 						throw new System.SystemException("onCommit was called twice on the same commit point: " + fileName);
@@ -191,7 +209,7 @@
 				int size = commits.Count;
 				for (int i = 0; i < size - numToKeep; i++)
 				{
-					((IndexCommitPoint) commits[i]).Delete();
+					((IndexCommit) commits[i]).Delete();
 					numDelete++;
 				}
 			}
@@ -227,27 +245,27 @@
 				this.dir = dir;
 				this.expirationTimeSeconds = seconds;
 			}
-			
-			public virtual void  OnInit(System.Collections.IList commits)
+
+            public virtual void OnInit(System.Collections.Generic.List<IndexCommitPoint> commits)
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				OnCommit(commits);
 			}
-			
-			public virtual void  OnCommit(System.Collections.IList commits)
+
+            public virtual void OnCommit(System.Collections.Generic.List<IndexCommitPoint> commits)
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				
-				IndexCommitPoint lastCommit = (IndexCommitPoint) commits[commits.Count - 1];
+				IndexCommit lastCommit = (IndexCommit) commits[commits.Count - 1];
 				
 				// Any commit older than expireTime should be deleted:
 				double expireTime = dir.FileModified(lastCommit.GetSegmentsFileName()) / 1000.0 - expirationTimeSeconds;
 				
-				System.Collections.IEnumerator it = commits.GetEnumerator();
+				System.Collections.Generic.IEnumerator<IndexCommitPoint> it = commits.GetEnumerator();
 				
 				while (it.MoveNext())
 				{
-					IndexCommitPoint commit = (IndexCommitPoint) it.Current;
+					IndexCommit commit = (IndexCommit) it.Current;
 					double modTime = dir.FileModified(commit.GetSegmentsFileName()) / 1000.0;
 					if (commit != lastCommit && modTime < expireTime)
 					{
@@ -345,13 +363,17 @@
 				KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy(this);
 				
 				Directory dir = new RAMDirectory();
+                policy.dir = dir;
 				
 				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
 				writer.SetMaxBufferedDocs(10);
 				writer.SetUseCompoundFile(useCompoundFile);
+                writer.SetMergeScheduler(new SerialMergeScheduler());
 				for (int i = 0; i < 107; i++)
 				{
 					AddDoc(writer);
+                    if (autoCommit && i % 10 == 0)
+                        writer.Commit();
 				}
 				writer.Close();
 				
@@ -361,16 +383,29 @@
 				writer.Close();
 				
 				Assert.AreEqual(2, policy.numOnInit);
-				if (autoCommit)
-				{
-					Assert.IsTrue(policy.numOnCommit > 2);
-				}
-				else
-				{
+				if (!autoCommit)
 					// If we are not auto committing then there should
 					// be exactly 2 commits (one per close above):
 					Assert.AreEqual(2, policy.numOnCommit);
-				}
+
+                // Test: ListCommits(Directory)
+                System.Collections.Generic.ICollection<IndexCommitPoint> commits = IndexReader.ListCommits(dir);
+                if (!autoCommit)
+                    // 1 from opening writer + 2 from closing writer
+                    Assert.AreEqual(3, commits.Count);
+                else
+                    // 1 from opening writer + 2 from closing writer
+                    // + 11 from calling writer.Commit() explicitly
+                    Assert.AreEqual(14, commits.Count);
+
+                System.Collections.Generic.IEnumerator<IndexCommitPoint> it = commits.GetEnumerator();
+                // Make sure we can open a reader on each commit
+                while (it.MoveNext())
+                {
+                    IndexCommit commit = (IndexCommit)it.Current;
+                    IndexReader r = IndexReader.Open(commit, null);
+                    r.Close();
+                }
 				
 				// Simplistic check: just verify all segments_N's still
 				// exist, and, I can open a reader on each:
@@ -390,7 +425,7 @@
 						// Open & close a writer and assert that it
 						// actually removed something:
 						int preCount = dir.List().Length;
-						writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false, policy);
+						writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
 						writer.Close();
 						int postCount = dir.List().Length;
 						Assert.IsTrue(postCount < preCount);
@@ -434,16 +469,10 @@
 				writer.Close();
 				
 				Assert.AreEqual(2, policy.numOnInit);
-				if (autoCommit)
-				{
-					Assert.IsTrue(policy.numOnCommit > 2);
-				}
-				else
-				{
+				if (!autoCommit)
 					// If we are not auto committing then there should
 					// be exactly 2 commits (one per close above):
 					Assert.AreEqual(2, policy.numOnCommit);
-				}
 				
 				// Simplistic check: just verify the index is in fact
 				// readable:
@@ -569,8 +598,8 @@
 					reader.DeleteDocument(3 * i + 1);
 					reader.SetNorm(4 * i + 1, "content", 2.0F);
 					IndexSearcher searcher = new IndexSearcher(reader);
-					Hits hits = searcher.Search(query);
-					Assert.AreEqual(16 * (1 + i), hits.Length());
+					ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+					Assert.AreEqual(16 * (1 + i), hits.Length);
 					// this is a commit when autoCommit=false:
 					reader.Close();
 					searcher.Close();
@@ -582,18 +611,12 @@
 				writer.Close();
 				
 				Assert.AreEqual(2 * (N + 2), policy.numOnInit);
-				if (autoCommit)
-				{
-					Assert.IsTrue(policy.numOnCommit > 2 * (N + 2) - 1);
-				}
-				else
-				{
+				if (!autoCommit)
 					Assert.AreEqual(2 * (N + 2) - 1, policy.numOnCommit);
-				}
 				
 				IndexSearcher searcher2 = new IndexSearcher(dir);
-				Hits hits2 = searcher2.Search(query);
-				Assert.AreEqual(176, hits2.Length());
+				ScoreDoc[] hits2 = searcher2.Search(query, null, 1000).scoreDocs;
+				Assert.AreEqual(176, hits2.Length);
 				
 				// Simplistic check: just verify only the past N segments_N's still
 				// exist, and, I can open a reader on each:
@@ -614,7 +637,7 @@
 						if (!autoCommit)
 						{
 							searcher2 = new IndexSearcher(reader);
-							hits2 = searcher2.Search(query);
+							hits2 = searcher2.Search(query, null, 1000).scoreDocs;
 							if (i > 1)
 							{
 								if (i % 2 == 0)
@@ -626,7 +649,7 @@
 									expectedCount -= 17;
 								}
 							}
-							Assert.AreEqual(expectedCount, hits2.Length());
+							Assert.AreEqual(expectedCount, hits2.Length);
 							searcher2.Close();
 						}
 						reader.Close();
@@ -695,8 +718,8 @@
 					reader.DeleteDocument(3);
 					reader.SetNorm(5, "content", 2.0F);
 					IndexSearcher searcher = new IndexSearcher(reader);
-					Hits hits = searcher.Search(query);
-					Assert.AreEqual(16, hits.Length());
+					ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+					Assert.AreEqual(16, hits.Length);
 					// this is a commit when autoCommit=false:
 					reader.Close();
 					searcher.Close();
@@ -708,18 +731,12 @@
 				}
 				
 				Assert.AreEqual(1 + 3 * (N + 1), policy.numOnInit);
-				if (autoCommit)
-				{
-					Assert.IsTrue(policy.numOnCommit > 3 * (N + 1) - 1);
-				}
-				else
-				{
+				if (!autoCommit)
 					Assert.AreEqual(2 * (N + 1), policy.numOnCommit);
-				}
 				
 				IndexSearcher searcher2 = new IndexSearcher(dir);
-				Hits hits2 = searcher2.Search(query);
-				Assert.AreEqual(0, hits2.Length());
+				ScoreDoc[] hits2 = searcher2.Search(query, null, 1000).scoreDocs;
+				Assert.AreEqual(0, hits2.Length);
 				
 				// Simplistic check: just verify only the past N segments_N's still
 				// exist, and, I can open a reader on each:
@@ -740,8 +757,8 @@
 						if (!autoCommit)
 						{
 							searcher2 = new IndexSearcher(reader);
-							hits2 = searcher2.Search(query);
-							Assert.AreEqual(expectedCount, hits2.Length());
+							hits2 = searcher2.Search(query, null, 1000).scoreDocs;
+							Assert.AreEqual(expectedCount, hits2.Length);
 							searcher2.Close();
 							if (expectedCount == 0)
 							{
@@ -783,7 +800,7 @@
 		private void  AddDoc(IndexWriter writer)
 		{
 			Document doc = new Document();
-			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
+			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 		}
 	}
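
The TestDeletionPolicy changes migrate IndexDeletionPolicy callbacks to generic List<IndexCommitPoint> parameters and cast the elements to IndexCommit, whose accessors (GetGeneration, GetVersion, GetTimestamp, IsOptimized, IsDeleted) the tests now verify. The contract: OnInit receives all existing commits when the writer opens, OnCommit receives them after every commit, and a policy releases a commit point by calling Delete() on it. A keep-only-last sketch in the same style as the policies above (the class name is illustrative):

    public class KeepOnlyLastCommitSketch : IndexDeletionPolicy
    {
        public void OnInit(System.Collections.Generic.List<IndexCommitPoint> commits)
        {
            OnCommit(commits);   // apply the same rule at writer-open time
        }

        public void OnCommit(System.Collections.Generic.List<IndexCommitPoint> commits)
        {
            // Commits arrive ordered oldest to newest; keep only the newest.
            for (int i = 0; i < commits.Count - 1; i++)
                ((IndexCommit)commits[i]).Delete();
        }
    }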

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDoc.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs Wed Jul 29 18:04:12 2009
@@ -19,15 +19,14 @@
 
 using NUnit.Framework;
 
-//using TestRunner = junit.textui.TestRunner;
-using Document = Lucene.Net.Documents.Document;
-using Directory = Lucene.Net.Store.Directory;
-using FSDirectory = Lucene.Net.Store.FSDirectory;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+using Directory = Lucene.Net.Store.Directory;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using FileDocument = Lucene.Net.Demo.FileDocument;
-using Similarity = Lucene.Net.Search.Similarity;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
@@ -139,7 +138,7 @@
 			System.IO.StreamWriter out_Renamed = new System.IO.StreamWriter(sw);
 			
 			Directory directory = FSDirectory.GetDirectory(indexDir);
-			IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			SegmentInfo si1 = IndexDoc(writer, "test.txt");
 			PrintSegment(out_Renamed, si1);
@@ -147,7 +146,6 @@
 			SegmentInfo si2 = IndexDoc(writer, "test2.txt");
 			PrintSegment(out_Renamed, si2);
 			writer.Close();
-			directory.Close();
 			
 			SegmentInfo siMerge = Merge(si1, si2, "merge", false);
 			PrintSegment(out_Renamed, siMerge);
@@ -157,7 +155,8 @@
 			
 			SegmentInfo siMerge3 = Merge(siMerge, siMerge2, "merge3", false);
 			PrintSegment(out_Renamed, siMerge3);
-			
+
+            directory.Close();
 			out_Renamed.Close();
 			sw.Close();
 			System.String multiFileOutput = System.Text.ASCIIEncoding.ASCII.GetString(sw.ToArray());
@@ -167,7 +166,7 @@
 			out_Renamed = new System.IO.StreamWriter(sw);
 			
 			directory = FSDirectory.GetDirectory(indexDir);
-			writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+			writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			si1 = IndexDoc(writer, "test.txt");
 			PrintSegment(out_Renamed, si1);
@@ -175,7 +174,6 @@
 			si2 = IndexDoc(writer, "test2.txt");
 			PrintSegment(out_Renamed, si2);
 			writer.Close();
-			directory.Close();
 			
 			siMerge = Merge(si1, si2, "merge", true);
 			PrintSegment(out_Renamed, siMerge);
@@ -185,18 +183,18 @@
 			
 			siMerge3 = Merge(siMerge, siMerge2, "merge3", true);
 			PrintSegment(out_Renamed, siMerge3);
-			
-			out_Renamed.Close();
+
+            directory.Close();
+            out_Renamed.Close();
 			sw.Close();
 			System.String singleFileOutput = System.Text.ASCIIEncoding.ASCII.GetString(sw.ToArray());
 			
 			Assert.AreEqual(multiFileOutput, singleFileOutput);
 		}
-		
-		
+				
 		private SegmentInfo IndexDoc(IndexWriter writer, System.String fileName)
 		{
-			System.IO.FileInfo file = new System.IO.FileInfo(workDir.FullName + "\\" + fileName);
+			System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, fileName));
 			Document doc = FileDocument.Document(file);
 			writer.AddDocument(doc);
 			writer.Flush();

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDocumentWriter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs Wed Jul 29 18:04:12 2009
@@ -19,14 +19,23 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using Token = Lucene.Net.Analysis.Token;
+using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using WhitespaceTokenizer = Lucene.Net.Analysis.WhitespaceTokenizer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Fieldable = Lucene.Net.Documents.Fieldable;
+using Index = Lucene.Net.Documents.Field.Index;
+using Store = Lucene.Net.Documents.Field.Store;
 using TermVector = Lucene.Net.Documents.Field.TermVector;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using Lucene.Net.Analysis;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
@@ -98,35 +107,35 @@
 					return input.Next();
 				}
 				
-				public override Token Next(Token result)
+				public override Token Next(Token reusableToken)
 				{
 					if (buffered != null)
 					{
-						Token t = buffered;
+						Token nextToken = buffered;
 						buffered = null;
-						return t;
+						return nextToken;
 					}
-					Token t2 = input.Next(result);
-					if (t2 == null)
+					Token nextToken2 = input.Next(reusableToken);
+					if (nextToken2 == null)
 						return null;
-					if (System.Char.IsDigit(t2.TermBuffer()[0]))
+					if (System.Char.IsDigit(nextToken2.TermBuffer()[0]))
 					{
-						t2.SetPositionIncrement(t2.TermBuffer()[0] - '0');
+						nextToken2.SetPositionIncrement(nextToken2.TermBuffer()[0] - '0');
 					}
 					if (first)
 					{
 						// set payload on first position only
-						t2.SetPayload(new Payload(new byte[]{100}));
+						nextToken2.SetPayload(new Payload(new byte[]{100}));
 						first = false;
 					}
 					
 					// index a "synonym" for every token
-					buffered = (Token) t2.Clone();
+					buffered = (Token) nextToken2.Clone();
 					buffered.SetPayload(null);
 					buffered.SetPositionIncrement(0);
 					buffered.SetTermBuffer(new char[]{'b'}, 0, 1);
 					
-					return t2;
+					return nextToken2;
 				}
 			}
 			private void  InitBlock(TestDocumentWriter enclosingInstance)
@@ -170,15 +179,16 @@
 			private System.String[] tokens = new System.String[]{"term1", "term2", "term3", "term2"};
 			private int index = 0;
 			
-			public override Token Next()
+			public override Token Next(Token reusableToken)
 			{
+                System.Diagnostics.Debug.Assert(reusableToken != null);
 				if (index == tokens.Length)
 				{
 					return null;
 				}
 				else
 				{
-					return new Token(tokens[index++], 0, 0);
+					return reusableToken.Reinit(tokens[index++], 0, 0);
 				}
 			}
 		}
@@ -203,7 +213,7 @@
 			Document testDoc = new Document();
 			DocHelper.SetupDoc(testDoc);
 			Analyzer analyzer = new WhitespaceAnalyzer();
-			IndexWriter writer = new IndexWriter(dir, analyzer, true);
+			IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(testDoc);
 			writer.Flush();
 			SegmentInfo info = writer.NewestSegment();
@@ -242,7 +252,7 @@
 			for (int i = 0; i < reader.FieldInfos().Size(); i++)
 			{
 				FieldInfo fi = reader.FieldInfos().FieldInfo(i);
-				if (fi.IsIndexed())
+                if (fi.IsIndexed_ForNUnitTest())
 				{
 					Assert.IsTrue(fi.omitNorms == !reader.HasNorms(fi.Name_ForNUnitTest));
 				}
@@ -253,12 +263,12 @@
 		public virtual void  TestPositionIncrementGap()
 		{
 			Analyzer analyzer = new AnonymousClassAnalyzer(this);
-			
-			IndexWriter writer = new IndexWriter(dir, analyzer, true);
+
+            IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			Document doc = new Document();
-			doc.Add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.TOKENIZED));
-			doc.Add(new Field("repeated", "repeated two", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("repeated", "repeated two", Field.Store.YES, Field.Index.ANALYZED));
 			
 			writer.AddDocument(doc);
 			writer.Flush();
@@ -278,11 +288,11 @@
 		public virtual void  TestTokenReuse()
 		{
 			Analyzer analyzer = new AnonymousClassAnalyzer1(this);
-			
-			IndexWriter writer = new IndexWriter(dir, analyzer, true);
+
+            IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			Document doc = new Document();
-			doc.Add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.ANALYZED));
 			
 			writer.AddDocument(doc);
 			writer.Flush();
@@ -306,7 +316,7 @@
 		[Test]
 		public virtual void  TestPreAnalyzedField()
 		{
-			IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true);
+            IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document doc = new Document();
 			
 			doc.Add(new Field("preanalyzed", new AnonymousClassTokenStream(this), TermVector.NO));
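
A pre-analyzed field like the one above bypasses the analyzer entirely: this Field overload is constructed from a TokenStream, so the writer indexes exactly the tokens that stream emits. A small usage sketch with a stock tokenizer standing in for the test's anonymous stream:

    Document doc = new Document();
    Lucene.Net.Analysis.TokenStream tokens = new Lucene.Net.Analysis.WhitespaceTokenizer(
        new System.IO.StringReader("already tokenized text"));
    // No Store/Index arguments: a TokenStream field is indexed-only and never stored.
    doc.Add(new Field("preanalyzed", tokens, Field.TermVector.NO));
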
@@ -342,16 +352,18 @@
 		{
 			Document doc = new Document();
 			// f1 first without tv then with tv
-			doc.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.UN_TOKENIZED, TermVector.NO));
-			doc.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.UN_TOKENIZED, TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.NO));
+			doc.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
 			// f2 first with tv then without tv
-			doc.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.UN_TOKENIZED, TermVector.WITH_POSITIONS_OFFSETS));
-			doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.UN_TOKENIZED, TermVector.NO));
+			doc.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.NO));
 			
 			RAMDirectory ram = new RAMDirectory();
-			IndexWriter writer = new IndexWriter(ram, new StandardAnalyzer(), true);
+            IndexWriter writer = new IndexWriter(ram, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(doc);
 			writer.Close();
+
+            _TestUtil.CheckIndex(ram);
 			
 			IndexReader reader = IndexReader.Open(ram);
 			// f1
@@ -364,4 +376,4 @@
 			Assert.AreEqual(2, tfv2.GetTerms().Length, "the 'with_tv' setting should rule!");
 		}
 	}
-}
+}
\ No newline at end of file
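
The _TestUtil.CheckIndex(ram) call added above gives the test an end-to-end consistency check of every segment once the writer closes. The same one-liner works against any Directory after a commit; a minimal sketch using only calls that appear in this commit (variable names illustrative):

    RAMDirectory dir2 = new RAMDirectory();
    IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    Document d = new Document();
    d.Add(new Field("f", "hello world", Field.Store.YES, Field.Index.ANALYZED));
    w.AddDocument(d);
    w.Close();
    _TestUtil.CheckIndex(dir2);  // fails the test if any segment is unreadable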

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFieldInfos.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs Wed Jul 29 18:04:12 2009
@@ -96,4 +96,4 @@
 			}
 		}
 	}
-}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFieldsReader.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs Wed Jul 29 18:04:12 2009
@@ -19,18 +19,17 @@
 
 using NUnit.Framework;
 
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Lucene.Net.Documents;
-using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Similarity = Lucene.Net.Search.Similarity;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using _TestUtil = Lucene.Net.Util._TestUtil;
 using IndexInput = Lucene.Net.Store.IndexInput;
 using IndexOutput = Lucene.Net.Store.IndexOutput;
 using Directory = Lucene.Net.Store.Directory;
 using BufferedIndexInput = Lucene.Net.Store.BufferedIndexInput;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
@@ -80,7 +79,7 @@
 			fieldInfos = new FieldInfos();
 			DocHelper.SetupDoc(testDoc);
 			fieldInfos.Add(testDoc);
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetUseCompoundFile(false);
 			writer.AddDocument(testDoc);
 			writer.Close();
@@ -148,6 +147,7 @@
 			field = doc.GetFieldable(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY);
 			Assert.IsTrue(field != null, "field is null and it shouldn't be");
 			Assert.IsTrue(field.IsLazy(), "field is not lazy and it should be");
+            Assert.IsTrue(field.BinaryValue() == null, "binary value isn't null for lazy string field");
 			value_Renamed = field.StringValue();
 			Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
 			Assert.IsTrue(value_Renamed.Equals(DocHelper.FIELD_2_COMPRESSED_TEXT) == true, value_Renamed + " is not equal to " + DocHelper.FIELD_2_COMPRESSED_TEXT);
@@ -166,6 +166,8 @@
 			
 			field = doc.GetFieldable(DocHelper.LAZY_FIELD_BINARY_KEY);
 			Assert.IsTrue(field != null, "field is null and it shouldn't be");
+            Assert.IsTrue(field.StringValue() == null, "stringValue isn't null for lazy binary field");
+
 			byte[] bytes = field.BinaryValue();
 			Assert.IsTrue(bytes != null, "bytes is null and it shouldn't be");
 			Assert.IsTrue(DocHelper.LAZY_FIELD_BINARY_BYTES.Length == bytes.Length, "");
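
The two assertions added in this file pin down the lazy-field contract: a lazily loaded string field returns null from BinaryValue(), a lazily loaded binary field returns null from StringValue(), and only the accessor matching the field's real type performs the deferred read. In sketch form, for a document loaded through a lazy FieldSelector (selector setup omitted; the field name is illustrative):

    Fieldable f = doc.GetFieldable("lazyBinaryField");
    if (f.IsLazy())
    {
        string s = f.StringValue();   // null: wrong accessor, nothing is loaded
        byte[] b = f.BinaryValue();   // triggers the actual read from the fields file
    }
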
@@ -201,7 +203,7 @@
 			reader.Close();
 			try
 			{
-				System.String value_Renamed = field.StringValue();
+				field.StringValue(); // the lazy load itself should throw; the value is never used
 				Assert.Fail("did not hit AlreadyClosedException as expected");
 			}
 			catch (AlreadyClosedException)
@@ -250,8 +252,8 @@
 			_TestUtil.RmDir(file);
 			FSDirectory tmpDir = FSDirectory.GetDirectory(file);
 			Assert.IsTrue(tmpDir != null);
-			
-			IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(), true);
+
+            IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetUseCompoundFile(false);
 			writer.AddDocument(testDoc);
 			writer.Close();
@@ -420,6 +422,10 @@
             {
                 indexInput.Close();
             }
+            public override object Clone()
+            {
+                return new FaultyIndexInput((IndexInput)indexInput.Clone());
+            }
         }
 
         // LUCENE-1262
@@ -433,7 +439,7 @@
             try
             {
                 Directory dir = new FaultyFSDirectory(indexDir);
-                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
                 for (int i = 0; i < 2; i++)
                     writer.AddDocument(testDoc);
                 writer.Optimize();
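
The Clone override added to FaultyIndexInput above is the essential part of any delegating IndexInput: Lucene clones inputs internally, and without the override a clone would fall back to a shallow copy that shares the single wrapped input and its file pointer. The general shape, with only the members IndexInput requires (a hedged sketch, not code from this patch):

    class DelegatingIndexInput : IndexInput
    {
        private IndexInput main;
        public DelegatingIndexInput(IndexInput main) { this.main = main; }
        public override byte ReadByte() { return main.ReadByte(); }
        public override void ReadBytes(byte[] b, int offset, int len) { main.ReadBytes(b, offset, len); }
        public override long GetFilePointer() { return main.GetFilePointer(); }
        public override void Seek(long pos) { main.Seek(pos); }
        public override long Length() { return main.Length(); }
        public override void Close() { main.Close(); }
        public override object Clone()
        {
            // Deep-clone the delegate; a shallow clone would leave two
            // "independent" inputs advancing one shared file pointer.
            return new DelegatingIndexInput((IndexInput) main.Clone());
        }
    }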

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFilterIndexReader.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs Wed Jul 29 18:04:12 2009
@@ -106,18 +106,18 @@
 		public virtual void  TestFilterIndexReader_Renamed_Method()
 		{
 			RAMDirectory directory = new MockRAMDirectory();
-			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
-			d1.Add(new Field("default", "one two", Field.Store.YES, Field.Index.TOKENIZED));
+			d1.Add(new Field("default", "one two", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(d1);
 			
 			Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
-			d2.Add(new Field("default", "one three", Field.Store.YES, Field.Index.TOKENIZED));
+			d2.Add(new Field("default", "one three", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(d2);
 			
 			Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
-			d3.Add(new Field("default", "two four", Field.Store.YES, Field.Index.TOKENIZED));
+			d3.Add(new Field("default", "two four", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(d3);
 			
 			writer.Close();
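
This file's changes are purely mechanical: the 2.4 constant rename plus the now-explicit MaxFieldLength argument (LIMITED preserves the old 10,000-terms-per-field default). The rename mapping, shown side by side:

    Document d = new Document();
    // Pre-2.4 (deprecated):  new Field("body", text, Field.Store.YES, Field.Index.TOKENIZED)
    // 2.4 equivalent, as used throughout this commit:
    d.Add(new Field("body", "some text", Field.Store.YES, Field.Index.ANALYZED));
    // Likewise UN_TOKENIZED -> NOT_ANALYZED for values indexed as a single term:
    d.Add(new Field("id", "42", Field.Store.YES, Field.Index.NOT_ANALYZED));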

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexFileDeleter.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs Wed Jul 29 18:04:12 2009
@@ -19,17 +19,14 @@
 
 using NUnit.Framework;
 
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Directory = Lucene.Net.Store.Directory;
 using IndexInput = Lucene.Net.Store.IndexInput;
 using IndexOutput = Lucene.Net.Store.IndexOutput;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Hits = Lucene.Net.Search.Hits;
-using IndexSearcher = Lucene.Net.Search.IndexSearcher;
-using TermQuery = Lucene.Net.Search.TermQuery;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 
 namespace Lucene.Net.Index
 {
@@ -47,7 +44,7 @@
 			
 			Directory dir = new RAMDirectory();
 			
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(10);
 			int i;
 			for (i = 0; i < 35; i++)
@@ -75,10 +72,10 @@
 			// .s0 file:
 			System.String[] files = dir.List();
 			
-			/*
-			for(int i=0;i<files.length;i++) {
-			System.out.println(i + ": " + files[i]);
+			for (int j = 0; j < files.Length; j++) {
+				System.Console.Out.WriteLine(j + ": " + files[j]);
 			}
+			/*
 			*/
 			
 			// The numbering of fields can vary depending on which
@@ -146,8 +143,8 @@
 			CopyFile(dir, "_0.cfs", "deletable");
 			
 			// Create some old segments file:
-			CopyFile(dir, "segments_a", "segments");
-			CopyFile(dir, "segments_a", "segments_2");
+			CopyFile(dir, "segments_3", "segments");
+			CopyFile(dir, "segments_3", "segments_2");
 			
 			// Create a bogus cfs file shadowing a non-cfs segment:
 			CopyFile(dir, "_2.cfs", "_3.cfs");
@@ -156,7 +153,7 @@
 			
 			// Open & close a writer: it should delete the above 4
 			// files and nothing more:
-			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+            writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 			writer.Close();
 			
 			System.String[] files2 = dir.List();
@@ -205,8 +202,8 @@
 		private void  AddDoc(IndexWriter writer, int id)
 		{
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
-			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
+			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
 			writer.AddDocument(doc);
 		}
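
The pattern this test relies on: IndexFileDeleter runs whenever an IndexWriter opens an existing directory, and it removes any file that looks like an index file but is not referenced by the current segments file. A hedged sketch reusing this test's CopyFile helper (the bogus file name is illustrative):

    // Plant an index-like file that no segments file references:
    CopyFile(dir, "_0.cfs", "_9.cfs");
    // Opening and closing a writer on the existing index sweeps it:
    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
    writer.Close();
    // dir.List() should no longer contain "_9.cfs".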
 

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexInput.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs Wed Jul 29 18:04:12 2009
@@ -28,17 +28,75 @@
 	public class TestIndexInput : LuceneTestCase
 	{
 		[Test]
-		public virtual void  TestRead()
-		{
-			IndexInput is_Renamed = new MockIndexInput(new byte[]{(byte) (0x80), (byte) (0x01), (byte) (0xFF), (byte) (0x7F), (byte) (0x80), (byte) (0x80), (byte) (0x01), (byte) (0x81), (byte) (0x80), (byte) (0x01), (byte) (0x06), (byte) 'L', (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', (byte) 'e'});
-			Assert.AreEqual(128, is_Renamed.ReadVInt());
-			Assert.AreEqual(16383, is_Renamed.ReadVInt());
-			Assert.AreEqual(16384, is_Renamed.ReadVInt());
-			Assert.AreEqual(16385, is_Renamed.ReadVInt());
-			Assert.AreEqual("Lucene", is_Renamed.ReadString());
-		}
-		
-		/// <summary> Expert
+        public void TestRead()
+        {
+            IndexInput is_Renamed = new MockIndexInput(
+                new byte[] { 
+                    (byte) 0x80, 0x01,
+                    (byte) 0xFF, 0x7F,
+                    (byte) 0x80, (byte) 0x80, 0x01,
+                    (byte) 0x81, (byte) 0x80, 0x01,
+                    0x06, (byte) 'L', (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', (byte) 'e',
+
+                    // 2-byte UTF-8 (U+00BF "INVERTED QUESTION MARK") 
+                    0x02, (byte) 0xC2, (byte) 0xBF,
+                    0x0A, (byte) 'L', (byte) 'u', (byte) 0xC2, (byte) 0xBF, 
+                        (byte) 'c', (byte) 'e', (byte) 0xC2, (byte) 0xBF, 
+                        (byte) 'n', (byte) 'e',
+
+                    // 3-byte UTF-8 (U+2620 "SKULL AND CROSSBONES") 
+                    0x03, (byte) 0xE2, (byte) 0x98, (byte) 0xA0,
+                    0x0C, (byte) 'L', (byte) 'u', (byte) 0xE2, (byte) 0x98, (byte) 0xA0,
+                        (byte) 'c', (byte) 'e', (byte) 0xE2, (byte) 0x98, (byte) 0xA0,
+                        (byte) 'n', (byte) 'e',
+
+                    // surrogate pairs
+                    // (U+1D11E "MUSICAL SYMBOL G CLEF")
+                    // (U+1D160 "MUSICAL SYMBOL EIGHTH NOTE")
+                    0x04, (byte) 0xF0, (byte) 0x9D, (byte) 0x84, (byte) 0x9E,
+                    0x08, (byte) 0xF0, (byte) 0x9D, (byte) 0x84, (byte) 0x9E, 
+                        (byte) 0xF0, (byte) 0x9D, (byte) 0x85, (byte) 0xA0, 
+                    0x0E, (byte) 'L', (byte) 'u',
+                        (byte) 0xF0, (byte) 0x9D, (byte) 0x84, (byte) 0x9E,
+                        (byte) 'c', (byte) 'e', 
+                        (byte) 0xF0, (byte) 0x9D, (byte) 0x85, (byte) 0xA0, 
+                        (byte) 'n', (byte) 'e',  
+
+                    // null bytes
+                    0x01, 0x00,
+                    0x08, (byte) 'L', (byte) 'u', 0x00, (byte) 'c', (byte) 'e', 0x00, (byte) 'n', (byte) 'e',
+
+                    // Modified UTF-8 null bytes
+                    0x02, (byte) 0xC0, (byte) 0x80,
+                    0x0A, (byte) 'L', (byte) 'u', (byte) 0xC0, (byte) 0x80, 
+                        (byte) 'c', (byte) 'e', (byte) 0xC0, (byte) 0x80, 
+                        (byte) 'n', (byte) 'e',
+                });
+
+            Assert.AreEqual(128, is_Renamed.ReadVInt());
+            Assert.AreEqual(16383, is_Renamed.ReadVInt());
+            Assert.AreEqual(16384, is_Renamed.ReadVInt());
+            Assert.AreEqual(16385, is_Renamed.ReadVInt());
+            Assert.AreEqual("Lucene", is_Renamed.ReadString());
+
+            Assert.AreEqual("\u00BF", is_Renamed.ReadString());
+            Assert.AreEqual("Lu\u00BFce\u00BFne", is_Renamed.ReadString());
+
+            Assert.AreEqual("\u2620", is_Renamed.ReadString());
+            Assert.AreEqual("Lu\u2620ce\u2620ne", is_Renamed.ReadString());
+
+            Assert.AreEqual("\uD834\uDD1E", is_Renamed.ReadString());
+            Assert.AreEqual("\uD834\uDD1E\uD834\uDD60", is_Renamed.ReadString());
+            Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne", is_Renamed.ReadString());
+
+            Assert.AreEqual("\u0000", is_Renamed.ReadString());
+            Assert.AreEqual("Lu\u0000ce\u0000ne", is_Renamed.ReadString());
+
+            Assert.AreEqual("\u0000", is_Renamed.ReadString());
+            Assert.AreEqual("Lu\u0000ce\u0000ne", is_Renamed.ReadString());
+        }
+
+        /// <summary> Expert
 		/// 
 		/// </summary>
 		/// <throws>  IOException </throws>
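
The byte arrays in the rewritten TestRead decode as follows: ReadVInt consumes 7 payload bits per byte, low-order group first, with the high bit flagging continuation; and ReadString in this version reads a VInt length counting UTF-8 bytes, not chars (hence 0x0A = 10 for the 8-char, 10-byte string "Lu\u00BFce\u00BFne"). A small encoder sketch showing where the test's leading bytes come from (the helper name is illustrative):

    // VInt: 7 bits per byte, least-significant group first; top bit = "more".
    static byte[] ToVInt(int i)
    {
        System.Collections.Generic.List<byte> bytes = new System.Collections.Generic.List<byte>();
        while ((i & ~0x7F) != 0)
        {
            bytes.Add((byte) ((i & 0x7F) | 0x80));
            i = (int) ((uint) i >> 7);   // unsigned shift
        }
        bytes.Add((byte) i);
        return bytes.ToArray();
    }
    // ToVInt(128)   -> 0x80 0x01
    // ToVInt(16383) -> 0xFF 0x7F
    // ToVInt(16384) -> 0x80 0x80 0x01
    // ToVInt(16385) -> 0x81 0x80 0x01   (the first bytes of the test input)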

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexModifier.cs?rev=798995&r1=798994&r2=798995&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs Wed Jul 29 18:04:12 2009
@@ -38,8 +38,6 @@
 	/// same time.
 	/// 
 	/// </summary>
-	/// <author>  Daniel Naber
-	/// </author>
 	/// <deprecated>
 	/// </deprecated>
 	[TestFixture]
@@ -137,8 +135,8 @@
 		private Lucene.Net.Documents.Document GetDoc()
 		{
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("body", System.Convert.ToString(docCount), Field.Store.YES, Field.Index.UN_TOKENIZED));
-			doc.Add(new Field("all", "x", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("body", System.Convert.ToString(docCount), Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("all", "x", Field.Store.YES, Field.Index.NOT_ANALYZED));
 			docCount++;
 			return doc;
 		}
@@ -267,11 +265,12 @@
 	class IndexThread : SupportClass.ThreadClass
 	{
 		
-		private const int ITERATIONS = 500; // iterations of thread test
+		private const int TEST_SECONDS = 3; // how many seconds to run each test
 		
 		internal static int id = 0;
-		internal static System.Collections.ArrayList idStack = new System.Collections.ArrayList();
-		
+		//internal static System.Collections.ArrayList idStack = new System.Collections.ArrayList();
+        internal static System.Collections.Stack idStack = new System.Collections.Stack();
+
 		internal int added = 0;
 		internal int deleted = 0;
 		
@@ -291,10 +290,11 @@
 		
 		override public void  Run()
 		{
+            System.DateTime endTime = System.DateTime.Now.AddSeconds((double) TEST_SECONDS);
 			try
 			{
-				for (int i = 0; i < ITERATIONS; i++)
-				{
+                while (System.DateTime.Now < endTime)
+                {
 					int rand = random.Next(101);
 					if (rand < 5)
 					{
@@ -304,11 +304,11 @@
 					{
 						Lucene.Net.Documents.Document doc = GetDocument();
 						index.AddDocument(doc);
-                        lock (idStack)
+                        lock (idStack.SyncRoot)
                         {
-                            idStack.Add(doc.Get("id"));
-                            added++;
+                            idStack.Push(doc.Get("id"));
                         }
+						added++;
 					}
 					else
 					{
@@ -317,13 +317,12 @@
 						System.String delId = null;
 						try
 						{
-                            lock (idStack)
+                            lock (idStack.SyncRoot)
                             {
-                                delId = idStack[idStack.Count - 1] as System.String;
-                                idStack.RemoveAt(idStack.Count - 1);
+                                delId = (string)idStack.Pop();
                             }
 						}
-						catch (System.ArgumentOutOfRangeException)
+						catch (System.InvalidOperationException)
 						{
 							continue;
 						}
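
The switch from ArrayList to System.Collections.Stack changes two things at once: the lock now goes through SyncRoot (the documented synchronization handle for the non-generic collections), and an empty pop surfaces as InvalidOperationException rather than ArgumentOutOfRangeException, which is why the catch clause changes. One hedged alternative that avoids the exception entirely is to test Count under the same lock:

    string delId = null;
    lock (idStack.SyncRoot)
    {
        if (idStack.Count > 0)
            delId = (string) idStack.Pop();
    }
    if (delId == null)
        continue;   // nothing available to delete yet
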
@@ -361,13 +360,13 @@
 			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
 			lock (GetType())
 			{
-				doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.UN_TOKENIZED));
+				doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
 				id++;
 			}
 			// add random stuff:
-			doc.Add(new Field("content", System.Convert.ToString(random.Next(1000)), Field.Store.YES, Field.Index.TOKENIZED));
-			doc.Add(new Field("content", System.Convert.ToString(random.Next(1000)), Field.Store.YES, Field.Index.TOKENIZED));
-			doc.Add(new Field("all", "x", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("content", System.Convert.ToString(random.Next(1000)), Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("content", System.Convert.ToString(random.Next(1000)), Field.Store.YES, Field.Index.ANALYZED));
+			doc.Add(new Field("all", "x", Field.Store.YES, Field.Index.ANALYZED));
 			return doc;
 		}
 	}
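
One aside on GetDocument above: lock (GetType()) works here, but it takes a lock on a publicly reachable Type object that any other code could also lock on. A private static gate is the more defensive idiom (a sketch, not part of this commit; the field name is illustrative):

    private static readonly object idLock = new object();

    // inside GetDocument():
    lock (idLock)
    {
        doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
        id++;
    }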